+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build relwithdebinfo -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.0ikBI4XCTK --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest . --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-relwithdebinfo
Configuring dependencies for platform tools
Warn[-WPluginErr]: in $B/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium: Requirement ram is redefined 16 -> 28
Warn[-WPluginErr]: in $B/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch: Requirement ram is redefined 16 -> 28
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [9785/9786 modules configured]
[0 ymakes processing] [9791/9791 modules configured]
[0 ymakes processing] [9791/9791 modules configured] [5089/5089 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
| 2.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a
| 0.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a
| 0.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a
| 0.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a
| 0.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a
| 0.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp
| 1.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace
| 1.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a
| 1.8%| PREPARE $(VCS)
| 4.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a
| 5.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a
| 6.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a
| 7.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a
| 8.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a
| 8.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a
| 7.5%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a | 7.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a | 8.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a | 8.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a | 9.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |10.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |10.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |10.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/operations/write_data.cpp |10.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |11.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/tx_controller.h_serialized.cpp |19.7%| PREPARE $(YMAKE_PYTHON3-212672652) - 8.40 MB |22.2%| PREPARE $(LLD_ROOT-3107549726) - 33.57 MB |31.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |47.2%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |47.3%| [CF] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/liblibs-apache-arrow.a |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_session.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |47.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |47.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/write.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |47.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |47.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp 
|47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/operations/manager.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |47.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/table_consumer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/common.h_serialized.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/misc/config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |48.5%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 6.39 MB |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/providers/stat/expr_nodes/libproviders-stat-expr_nodes.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/proto/libutils-log-proto.a |48.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/libcpp-digest-argonish.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |48.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |48.4%| PREPARE $(CLANG-359471697) - 310.39 MB |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |48.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |48.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/build_info.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/libessentials-parser-common.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |48.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svn_interface.c |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |48.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/http/fetch/libcpp-http-fetch.a |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |48.3%| PREPARE $(CLANG20-867705943) |48.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |48.6%| [BI] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/buildinfo_data.h |47.8%| PREPARE $(CLANG_FORMAT-3815817643) |47.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |48.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/svnversion/svnversion.cpp |48.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/library/cpp/build_info/build_info_static.cpp |48.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |48.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/operations/events.cpp |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/simple/libcpp-http-simple.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |48.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |48.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/sandbox.cpp |48.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |48.7%| PREPARE $(RUFF-3583455953) - 8.40 MB |48.7%| PREPARE $(BLACK-1356850112) - 8.40 MB |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |48.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |48.6%| PREPARE $(FLAKE8_PY3-3596799299) - 16.79 MB |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/libessentials-utils-fetch.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/join_perf/libkqp-tools-join_perf.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |48.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |48.9%| PREPARE $(TEST_TOOL_HOST-sbr:10164110401) - 25.18 MB |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |49.0%| PREPARE $(PYTHON) - 41.96 MB |49.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |49.1%| [CP] {default-linux-x86_64, relwithdebinfo} $(B)/common_test.context |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tools/combiner_perf/libkqp-tools-combiner_perf.a |49.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/tcmalloc/libcpp-malloc-tcmalloc.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |49.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/yajl/libcontrib-deprecated-yajl.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |49.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |49.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |49.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-event-stream/librestricted-aws-aws-c-event-stream.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/libcontrib-restricted-abseil-cpp-tstring.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/libcontrib-restricted-abseil-cpp.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/dragonbox/libdragonbox.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |50.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googletest/librestricted-googletest-googletest.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/lite/libcpp-charset-lite.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/2d_array/libcpp-containers-2d_array.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/bitseq/libcpp-containers-bitseq.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/concurrent_hash/libcpp-containers-concurrent_hash.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |50.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cron_expression/liblibrary-cpp-cron_expression.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |50.6%| PREPARE $(FLAKE8_PY2-2255386470) - 8.42 MB |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/kmp/libcpp-deprecated-kmp.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |50.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/protos/libcommon-metrics-protos.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/ut/utils/libcomp_nodes-ut-utils.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |51.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |51.0%| PREPARE $(CLANG-1922233694) - 209.73 MB |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/address/libactors-interconnect-address.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |51.2%| PREPARE $(CLANG16-1380963495) - 285.25 MB |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |51.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/libactors-interconnect-rdma.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/tz/libcpp-type_info-tz.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/dump_helpers/libyt-lib-dump_helpers.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |51.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |51.5%| PREPARE $(JDK17-2548586558) - 176.18 MB |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |51.6%| PREPARE $(JDK_DEFAULT-2548586558) |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |51.7%| PREPARE $(CLANG18-1866954364) - 318.80 MB |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |51.7%| PREPARE $(WITH_JDK-sbr:9470949154) |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/config/libessentials-providers-config.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |51.7%| PREPARE $(WITH_JDK17-sbr:9470949154) |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/full_capture/libyt-lib-full_capture.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |52.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/langver/libessentials-public-langver.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |52.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/counters_info/libydb-core-counters_info.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/scrub/libblobstorage-vdisk-scrub.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |52.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/driver_lib/cli_base/libcli_base.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/libcontrol-lib-generated.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |53.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/range_treap/libydb-library-range_treap.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |53.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/utils/liblibs-rate_limiter-utils.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_compilation/liblibs-row_dispatcher-purecalc_compilation.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |53.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/table_creator/libydb-library-table_creator.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/service/libcore-graph-service.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/purecalc_no_pg_wrapper/liblibs-row_dispatcher-purecalc_no_pg_wrapper.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/jaeger_tracing/libydb-core-jaeger_tracing.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/events/libkqp-common-events.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/result_set_format/libkqp-common-result_set_format.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |53.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/common/libbehaviour-streaming_query-common.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.global.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/libcore-kqp-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/common/libbrotli-c-common.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/streaming_query/libgateway-behaviour-streaming_query.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/common/libpersqueue-pqtablet-common.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |54.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/pqtablet/blob/libpersqueue-pqtablet-blob.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/hash_join_utils/libdq-comp_nodes-hash_join_utils.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/write_meta/libpersqueue-public-write_meta.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/fetcher/libpersqueue-public-fetcher.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/llvm16/libdq-comp_nodes-llvm16.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/tasks_packer/libfq-libs-tasks_packer.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/protos/libcore-graph-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/ydb/libfq-libs-ydb.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/token_manager/libcore-security-token_manager.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/pg_tables/libcore-sys_view-pg_tables.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/common/libcore-sys_view-common.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/libcore-graph-shard.a |54.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/partition_stats/libcore-sys_view-partition_stats.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/dec/libbrotli-c-dec.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/runtime/libyql-dq-runtime.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/processor/libcore-sys_view-processor.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/aggregator/libcore-statistics-aggregator.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/libpersqueue-pqtablet-partition.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tracing/libydb-core-tracing.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/transactions/libcolumnshard-bg_tasks-transactions.a 
|54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/interface/libcommon-arrow-interface.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/proxy/libcore-kesus-proxy.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/buffer/libkqp-common-buffer.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/libcore-keyvalue-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/events/libdq-actors-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/client/libcommon-token_accessor-client.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/token_accessor/grpc/libcommon-token_accessor-grpc.a |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/provider/libproviders-clickhouse-provider.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/pushdown/libproviders-common-pushdown.a |55.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/grpc/libdq-api-grpc.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |55.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/shared_blobs.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/api/protos/libdq-api-protos.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/merge_subset.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/counters/libproviders-dq-counters.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/libproviders-dq-actors.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |55.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/dq/opt/libyql-dq-opt.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/interface/libproviders-dq-interface.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |55.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |55.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/runtime/libproviders-dq-runtime.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/libproviders-dq-provider.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/proto/libkqp-proxy_service-proto.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner/libproviders-dq-task_runner.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/libproviders-dq-worker_manager.a |55.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/actors/libproviders-generic-actors.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libapi-service-protos.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libssh2/libcontrib-libs-libssh2.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/libconnector-api-service.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |55.4%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/control.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/actor/write.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/events.cpp |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/put_status.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/export/session/task.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/write_controller.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/blob_constructor.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/compacted_blob_constructor.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/pushdown/libproviders-generic-pushdown.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/common/libproviders-pq-common.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/async_io/libproviders-pq-async_io.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/provider/libproviders-generic-provider.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/expr_nodes/libproviders-pq-expr_nodes.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/cm_client/libproviders-pq-cm_client.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |55.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |55.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/background_controller.cpp |55.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blob_cache.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blob.cpp |54.8%| PREPARE $(GDB) - 25.18 MB |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/columnshard.h_serialized.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp |55.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |55.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |54.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/columnshard_private_events.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/defs.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/counters.cpp |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/write_actor.cpp |54.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/columnshard_schema.cpp |54.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/cache/libpersqueue-pqtablet-cache.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/proto/libproviders-pq-proto.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqrb/libcore-persqueue-pqrb.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/stats_collector/libproviders-dq-stats_collector.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |54.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/libcore-persqueue-pqtablet.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors_factory/libproviders-s3-actors_factory.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/compressors/libproviders-s3-compressors.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/common/libproviders-s3-common.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/events/libproviders-s3-events.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/credentials/libproviders-s3-credentials.a |54.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/protos/libcore-pgproxy-protos.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |54.0%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/object_listers/libproviders-s3-object_listers.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/service/libproviders-dq-service.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |54.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/retry/libimpl-internal-retry.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/thread_pool/libimpl-internal-thread_pool.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/common/libproviders-solomon-common.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/events/libproviders-solomon-events.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/expr_nodes/libproviders-solomon-expr_nodes.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tools/visualize_portions/libpy3visualize_portions.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/proto/libproviders-solomon-proto.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a >> test.py::py2_flake8 
[GOOD] |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/client/libsolomon-solomon_accessor-client.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/solomon_accessor/grpc/libsolomon-solomon_accessor-grpc.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/proto/libproviders-ydb-proto.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |54.2%| [TS] ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |54.6%| [TS] ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/provider/libproviders-solomon-provider.a |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/resolver.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/resource_pools/libydb-core-resource_pools.a |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |54.6%| [TS] ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/libclient-nc_private-audit.a >> test_workload.py::flake8 [GOOD] |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/libydb-core-transfer.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/manager/libcolumnshard-bg_tasks-manager.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/plan/libyql-utils-plan.a |54.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a >> test_restarts.py::flake8 [GOOD] >> test_sql_negative.py::flake8 [GOOD] >> test_sql_streaming.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/audit/v1/common/libaudit-v1-common.a |54.5%| [AR] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/actors/libproviders-clickhouse-actors.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/libapi-client-nc_private.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/common/v1/libnc_private-common-v1.a |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |54.7%| [TS] ydb/tests/fq/streaming_optimize/flake8 >> test_sql_streaming.py::flake8 [GOOD] |54.7%| [TS] ydb/tests/stress/viewer/tests/flake8 >> test_workload.py::flake8 [GOOD] |54.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/blob_manager.cpp |54.7%| [TS] ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] |54.8%| [TS] ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/iam/v1/libnc_private-iam-v1.a |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/remove.cpp >> test_leader_start_inflight.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] |54.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/read.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc_actor.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/action.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/write.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/read.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/common.cpp |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/blob_set.cpp |54.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc_actor.cpp >> test_ttl.py::flake8 [GOOD] |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storages_manager.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/storage.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/remove.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/address.cpp |54.7%| [TS] 
ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |54.7%| [TS] ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/resourcemanager/libclient-yc_private-resourcemanager.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |54.8%| [TS] ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/discovery_mutator/libclient-extensions-discovery_mutator.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |54.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a >> __main__.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |54.8%| [TS] ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |54.9%| [TS] ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |54.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |54.9%| [AR] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |54.9%| [TS] ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |54.7%| [TS] ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |54.8%| [TS] ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |55.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> test.py::py2_flake8 [GOOD] >> data_read_correctness.py::flake8 [GOOD] >> order_by_with_limit.py::flake8 [GOOD] >> tablets_movement.py::flake8 [GOOD] >> test_create.py::flake8 [GOOD] >> test_cs_many_updates.py::flake8 [GOOD] >> test_delete.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_log_scenario.py::flake8 [GOOD] >> test_overloads.py::flake8 [GOOD] >> test_replace.py::flake8 [GOOD] >> test_select.py::flake8 [GOOD] >> test_update.py::flake8 [GOOD] >> test_upsert.py::flake8 [GOOD] >> upgrade_to_internal_path_id.py::flake8 [GOOD] >> zip_bomb.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/cursor.cpp >> test.py::py2_flake8 [GOOD] |54.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |54.4%| [TS] ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 
>> test.py::py2_flake8 [GOOD] |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/changes.cpp |54.6%| [TS] ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |54.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.h_serialized.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/compaction_info.cpp |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/settings.cpp |54.6%| [TS] ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/move_portions.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/comp_nodes/libproviders-ydb-comp_nodes.a |54.5%| [TS] ydb/tests/olap/flake8 >> zip_bomb.py::flake8 [GOOD] |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |54.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp >> test_workload_topic.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |54.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |54.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |54.6%| [TS] ydb/tests/stress/topic_kafka/tests/flake8 >> test_workload_topic.py::flake8 [GOOD] |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/libyql-essentials-sql.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a >> test.py::py2_flake8 [GOOD] |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/common/libproviders-dq-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |54.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/path_generator/libproviders-s3-path_generator.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |54.7%| [TS] ydb/tests/library/compatibility/binaries/downloader/flake8 >> __main__.py::flake8 [GOOD] |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libproviders-s3-proto.a |54.7%| [TS] ydb/library/yql/tests/sql/hybrid_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/common/libimpl-internal-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/ini_config/libini_config.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a >> __main__.py::flake8 [GOOD] |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/make_request/libimpl-internal-make_request.a |54.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/db_driver_state/libimpl-internal-db_driver_state.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/yql_parser/libydb_cli-common-yql_parser.a |54.8%| [TS] ydb/tests/stress/s3_backups/tests/flake8 >> test_workload.py::flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a >> overlapping_portions.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |54.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |54.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/grpc_connections/libimpl-internal-grpc_connections.a |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |54.4%| [TS] ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |54.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_info.cpp >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |54.8%| [TS] ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/common.cpp |54.9%| [TS] ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |54.9%| [TS] ydb/tests/olap/oom/flake8 >> overlapping_portions.py::flake8 [GOOD] |54.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a >> test.py::flake8 [GOOD] |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |55.0%| [TS] ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp >> test.py::py2_flake8 [GOOD] >> basic_user_scenarios.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |54.9%| [TS] ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |55.0%| [TS] ydb/tests/functional/clickbench/flake8 >> test.py::flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |55.1%| [TS] ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/libessentials-sql-v0.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |55.0%| [TS] ydb/tests/functional/backup_collection/flake8 >> conftest.py::flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/read.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a >> test.py::py2_flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/remove.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |55.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |55.0%| [TS] ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/plain_status/libimpl-internal-plain_status.a |55.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a >> test.py::py2_flake8 [GOOD] |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/endpoints/libclient-impl-endpoints.a |55.2%| [TS] ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/executor/libclient-impl-executor.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |55.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libapi-protos-annotations.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |55.0%| [TS] ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/actors/libproviders-ydb-actors.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/logger/libimpl-internal-logger.a >> test.py::py2_flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> test_vdisks.py::flake8 [GOOD] |55.2%| [TS] 
ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |55.4%| [TS] ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/stats/libclient-impl-stats.a |55.4%| [TS] ydb/tests/functional/blobstorage/flake8 >> test_vdisks.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |55.4%| [TS] ydb/tests/stress/show_create/view/tests/flake8 >> test_workload.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/local_gateway/libproviders-dq-local_gateway.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a >> test_break.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |55.4%| [TS] ydb/tests/functional/minidumps/flake8 >> test_break.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |55.5%| [TS] ydb/tests/stress/ctas/tests/flake8 >> test_workload.py::flake8 [GOOD] |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/session/libclient-impl-session.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/internal/value_helpers/libimpl-internal-value_helpers.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |55.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/columnshard/blobs_action/tier/gc_actor.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |55.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |55.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |55.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_common.py::flake8 [GOOD] |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/include/libclient-persqueue_public-include.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/deprecated/http-parser/libcontrib-deprecated-http-parser.a |55.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp >> test_yandex_audit.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |55.5%| [TS] ydb/tests/tools/nemesis/ut/flake8 >> 
test_tablet.py::flake8 [GOOD] |55.6%| [TS] ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |55.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |55.6%| [TS] ydb/tests/fq/multi_plane/flake8 >> test_retry_high_rate.py::flake8 [GOOD] |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |55.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a >> conftest.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] |55.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |55.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a >> test_restarts.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> tablet_scheme_tests.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |55.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |55.7%| [TS] ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |55.7%| [TS] ydb/tests/functional/statistics/flake8 >> test_restarts.py::flake8 [GOOD] |55.7%| [TS] ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |55.7%| [TS] ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |55.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/crypto/quictls/libngtcp2-crypto-quictls.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |55.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |55.9%| [TS] ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |55.9%| [TS] ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 >> test.py::py2_flake8 [GOOD] |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |55.9%| [TS] ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/time/libsrc-library-time.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |55.9%| [TS] ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |56.0%| [TS] ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libutils-fetch-proto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/codes/libservices-datastreams-codes.a >> test.py::flake8 [GOOD] |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/libyql-essentials-utils.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a >> test_async_replication.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |56.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |56.0%| [TS] ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |55.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |56.0%| [TS] ydb/tests/datashard/async_replication/flake8 >> test_async_replication.py::flake8 [GOOD] |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/log/libessentials-utils-log.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |56.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/threading/libessentials-utils-threading.a >> test_decimal.py::flake8 [GOOD] >> test_duplicates.py::flake8 [GOOD] >> test_parallel.py::flake8 [GOOD] >> test_s1.py::flake8 [GOOD] >> test_s_float.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |56.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |56.1%| [TS] ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |56.1%| [TS] ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/build/cow/on/libbuild-cow-on.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a >> test.py::flake8 [GOOD] |56.1%| [TS] ydb/tests/functional/tpc/medium/tpch/flake8 >> test_s_float.py::flake8 [GOOD] |56.1%| [TS] ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |56.1%| [TS] ydb/tests/stress/olap_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |56.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> test_s3.py::flake8 [GOOD] |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |56.2%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |56.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |56.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] >> reconfig_state_storage_workload_test.py::flake8 [GOOD] >> test_board_workload.py::flake8 [GOOD] >> test_scheme_board_workload.py::flake8 [GOOD] >> test_state_storage_workload.py::flake8 [GOOD] |56.2%| [TS] ydb/tests/datashard/s3/flake8 >> test_s3.py::flake8 [GOOD] |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a |56.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/libservices-sqs_topic-queue_url.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a >> test_split_merge.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.3%| [TS] ydb/tests/stress/reconfig_state_storage_workload/tests/flake8 >> test_state_storage_workload.py::flake8 [GOOD] |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a >> test_workload.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_grants.py::flake8 [GOOD] >> test_paths_lookup.py::flake8 [GOOD] |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a >> test.py::flake8 [GOOD] |56.3%| [TS] ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.3%| [TS] ydb/tests/datashard/split_merge/flake8 >> test_split_merge.py::flake8 [GOOD] |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/c/enc/libbrotli-c-enc.a |56.4%| [TS] ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |56.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |56.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |56.4%| [TS] ydb/tests/functional/security/flake8 >> test_paths_lookup.py::flake8 [GOOD] |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/client/libcpp-mapreduce-client.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/liblib-Target-X86.a |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a >> test_workload.py::flake8 [GOOD] >> test_add_column.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |56.4%| [TS] ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |56.5%| [TS] ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http/libcpp-mapreduce-http.a |56.5%| [TS] ydb/tests/datashard/add_column/flake8 >> test_add_column.py::flake8 [GOOD] |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |56.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |56.5%| [TS] ydb/tests/stress/cdc/tests/flake8 >> test_workload.py::flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a >> helpers.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::flake8 [GOOD] |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ibdrv/libcontrib-libs-ibdrv.a |56.6%| [TS] ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.6%| [TS] ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/http_client/libcpp-mapreduce-http_client.a |56.6%| [TS] ydb/tests/stress/transfer/tests/flake8 >> test_workload.py::flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |56.6%| [TS] ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] 
|56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a >> test.py::py2_flake8 [GOOD] |56.6%| [TS] ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |56.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a >> test_base.py::flake8 [GOOD] >> test_postgres.py::flake8 [GOOD] >> test_sql_logic.py::flake8 [GOOD] >> test_stream_query.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a >> test.py::flake8 [GOOD] >> test_kafka_streams.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |56.7%| [TS] ydb/tests/functional/suite_tests/flake8 >> test_stream_query.py::flake8 [GOOD] |56.7%| [TS] ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a >> test_dml.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |56.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/logging/libmapreduce-interface-logging.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |56.8%| [TS] ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |56.8%| [TS] ydb/tests/stress/kafka/tests/flake8 >> test_kafka_streams.py::flake8 [GOOD] |56.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |56.8%| [TS] ydb/tests/datashard/dml/flake8 >> test_dml.py::flake8 [GOOD] >> test_clean.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_default_path.py::flake8 [GOOD] >> test_diff_processing.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_import_csv.py::flake8 [GOOD] >> test_upload.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/interface/libcpp-mapreduce-interface.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |56.8%| [TS] ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |56.8%| [TS] ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 >> test.py::py2_flake8 [GOOD] |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a >> test_select.py::flake8 [GOOD] >> pile_promotion_test.py::flake8 [GOOD] >> test_scheme_board_workload.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |56.9%| [TS] ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |56.9%| [TS] ydb/tests/functional/tpc/medium/flake8 >> test_workload_manager.py::flake8 [GOOD] |56.9%| [TS] ydb/tests/functional/tpcc/flake8 >> test.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a >> test.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> kikimr_config.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |56.9%| [TS] ydb/tests/datashard/select/flake8 >> test_select.py::flake8 [GOOD] |56.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |57.0%| [TS] ydb/tests/stress/scheme_board/pile_promotion/tests/flake8 >> test_scheme_board_workload.py::flake8 [GOOD] |57.0%| [TS] ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |57.0%| [TS] ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a >> test.py::py2_flake8 [GOOD] |57.0%| [TS] ydb/tests/library/ut/flake8 >> kikimr_config.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/third_party/utf8_range/libprotobuf-third_party-utf8_range.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a >> test_query_cache.py::flake8 [GOOD] >> test_sql.py::flake8 [GOOD] |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |57.1%| [TS] ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/no_percpu_cache/liblibs-tcmalloc-no_percpu_cache.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |57.1%| [TS] ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |57.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |57.2%| [TS] ydb/library/yql/tests/sql/solomon/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |57.2%| [TS] ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |57.2%| [TS] ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut >> test_clickbench.py::flake8 [GOOD] >> test_external.py::flake8 [GOOD] >> test_import_csv.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_upload.py::flake8 [GOOD] >> test_workload_cdc.py::flake8 [GOOD] >> test_workload_ctas.py::flake8 [GOOD] >> test_workload_kafka.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |57.2%| [TS] ydb/tests/olap/data_quotas/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |57.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |57.3%| [TS] ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] >> test_workload_kv.py::flake8 [GOOD] >> test_workload_log.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] >> test_workload_mixed.py::flake8 [GOOD] >> test_workload_node_broker.py::flake8 [GOOD] >> test_workload_olap.py::flake8 [GOOD] >> test_workload_oltp.py::flake8 [GOOD] >> test_workload_parallel.py::flake8 [GOOD] >> test_workload_reconfig_state_storage.py::flake8 [GOOD] >> test_workload_show_create.py::flake8 [GOOD] >> test_workload_topic.py::flake8 [GOOD] >> test_workload_simple_queue.py::flake8 [GOOD] >> test_workload_statistics.py::flake8 [GOOD] >> test_workload_topic.py::flake8 [GOOD] >> test_workload_topic_kafka.py::flake8 [GOOD] >> test_workload_transfer.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/regex/librestricted-boost-regex.a |57.2%| [TS] ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Utils/liblib-Transforms-Utils.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |57.4%| [TS] ydb/tests/olap/load/flake8 >> test_workload_transfer.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a >> test_liveness_wardens.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |57.3%| [TS] ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |57.4%| [TS] ydb/tests/stress/topic/tests/flake8 >> test_workload_topic.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_disposition.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> http_helpers.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> test_canonical_records.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_streaming.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> 
test.py::py2_flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |57.4%| [TS] ydb/tests/fq/solomon/flake8 >> test.py::flake8 [GOOD] |57.4%| [TS] ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a >> test_unknown_data_source.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |57.4%| [TS] ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |57.4%| [TS] ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/local_proxy/libreplication-ydb_proxy-local_proxy.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |57.5%| [TS] ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |57.5%| [TS] ydb/tests/functional/audit/flake8 >> test_canonical_records.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> base.py::flake8 [GOOD] |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |57.5%| [TS] ydb/tests/fq/streaming/flake8 >> test_streaming.py::flake8 [GOOD] |57.5%| [TS] ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |57.6%| [TS] ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a >> test.py::flake8 [GOOD] |57.6%| [TS] ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] |57.6%| [TS] ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |57.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/issue/libessentials-core-issue.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> test_vector_index.py::flake8 [GOOD] >> test_vector_index_negative.py::flake8 [GOOD] |57.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |57.7%| [TS] ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a >> test_kqprun_recipe.py::flake8 [GOOD] >> test_parametrized_queries.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |57.7%| [TS] ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |57.8%| [TS] ydb/tests/datashard/vector_index/medium/flake8 >> test_vector_index_negative.py::flake8 [GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> test.py::py2_flake8 [GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |57.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |57.8%| [TS] ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |57.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |57.8%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |57.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a >> test.py::py2_flake8 [GOOD] >> test_quoting.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |57.9%| [TS] ydb/tests/datashard/parametrized_queries/flake8 >> test_parametrized_queries.py::flake8 [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a >> test_postgres.py::flake8 [GOOD] >> test_cte.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/program_options/librestricted-boost-program_options.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.a |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |58.0%| [TS] ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut >> test_fifo_messaging.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/rpc_client/libcpp-mapreduce-rpc_client.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/libcontrib-libs-grpc.a |58.0%| [TS] ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 >> test.py::py2_flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |58.0%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |58.0%| [TS] ydb/tests/stress/mixedpy/tests/flake8 >> test_workload.py::flake8 [GOOD] |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |58.1%| [TS] ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay >> test_dump_restore.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> tier_delete.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |58.1%| [TS] 
ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 >> test.py::py2_flake8 [GOOD] |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |58.1%| [TS] ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |58.1%| [TS] ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a >> test_ttl.py::flake8 [GOOD] |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/liblibrary-cpp-lfalloc.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |58.2%| [TS] ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |58.2%| [TS] ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_recursive_remove.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_delete_all_after_inserts.py::flake8 [GOOD] >> test_delete_by_explicit_row_id.py::flake8 [GOOD] |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a >> test_alter_ops.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |58.2%| 
[AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/osiris.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |58.3%| [TS] ydb/tests/functional/kqp/plan2svg/flake8 >> test_cte.py::flake8 [GOOD] |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut >> test.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_secondary_index.py::flake8 [GOOD] |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a >> conftest.py::flake8 [GOOD] >> test_secrets.py::flake8 [GOOD] >> test_secrets_usage.py::flake8 [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/join_perf/bin/main.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a >> test_large_import.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication_huge.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> test.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |58.3%| [TS] ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |58.3%| [TS] ydb/tests/stress/node_broker/tests/flake8 >> test_workload.py::flake8 [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |58.4%| [TS] ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |58.4%| [TS] ydb/tests/datashard/dump_restore/flake8 >> test_dump_restore.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> 
test_cms_restart.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] |58.3%| [TS] ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/race.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |58.4%| [TS] ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |58.4%| [TS] ydb/tests/datashard/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |58.5%| [TS] ydb/tests/olap/delete/flake8 >> test_delete_by_explicit_row_id.py::flake8 [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/jemalloc/libcpp-malloc-jemalloc.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |58.5%| [TS] ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |58.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |58.6%| [TS] ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |58.6%| [TS] ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> remote_execution.py::flake8 [GOOD] >> 
results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |58.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |58.6%| [TS] ydb/tests/datashard/secondary_index/flake8 >> test_secondary_index.py::flake8 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/statestorage_2_ring_groups.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |58.6%| [TS] ydb/tests/olap/s3_import/large/flake8 >> test_large_import.py::flake8 [GOOD] |58.6%| [TS] ydb/tests/functional/secrets/flake8 >> test_secrets_usage.py::flake8 [GOOD] |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |58.6%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |58.7%| [TS] ydb/core/tx/columnshard/tools/visualize_portions/flake8 >> __main__.py::flake8 [GOOD] |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |58.6%| [TS] ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |58.7%| [TS] ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/move_pdisk.cpp >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |58.7%| [TS] ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |58.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/balancing.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohappyeyeballs/libpy3contrib-python-aiohappyeyeballs.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/future/py3/libpy3python-future-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |58.8%| [TS] ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] |58.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a >> test_copy_table.py::flake8 [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |58.9%| [TS] ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/import_test/libpy3python-testing-import_test.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |59.0%| [TS] ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp >> base.py::flake8 [GOOD] >> test_simple_table.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] >> test_types_and_formats.py::flake8 [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/pq/provider/ut/yql_pq_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp >> base.py::flake8 [GOOD] >> basic_reading.py::flake8 [GOOD] >> data_paging.py::flake8 [GOOD] >> listing_paging.py::flake8 [GOOD] >> settings_validation.py::flake8 [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |59.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |59.1%| [TS] ydb/tests/datashard/copy_table/flake8 >> test_copy_table.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/metrics/libproviders-dq-metrics.a |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |59.1%| [TS] ydb/tests/olap/s3_import/flake8 >> test_types_and_formats.py::flake8 [GOOD] |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dq/worker_node/main.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |59.2%| [TS] ydb/tests/solomon/reading/flake8 >> settings_validation.py::flake8 [GOOD] |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a 
|59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |59.4%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/registry/libcpp-dwarf_backtrace-registry.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/solomon/ydb-library-yql-tests-sql-solomon |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ftxui/libcontrib-libs-ftxui.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/limiter/grouped_memory/ut/ut_manager.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/libcontrib-libs-tcmalloc.global.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |59.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/scheduler/libproviders-dq-scheduler.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part3/ydb-tests-fq-yt-kqp_yt_file-part3 |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/global_worker_manager/libproviders-dq-global_worker_manager.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |59.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_fat.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/actors/yt/libdq-actors-yt.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o >> conftest.py::flake8 [GOOD] >> s3_helpers.py::flake8 [GOOD] >> test_bindings_0.py::flake8 [GOOD] >> test_bindings_1.py::flake8 [GOOD] >> test_compressions.py::flake8 [GOOD] >> test_early_finish.py::flake8 [GOOD] >> test_explicit_partitioning_0.py::flake8 [GOOD] >> test_explicit_partitioning_1.py::flake8 [GOOD] >> test_format_setting.py::flake8 [GOOD] >> test_formats.py::flake8 [GOOD] >> test_inflight.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> 
test_public_metrics.py::flake8 [GOOD] >> test_push_down.py::flake8 [GOOD] >> test_s3_0.py::flake8 [GOOD] >> test_s3_1.py::flake8 [GOOD] >> test_size_limit.py::flake8 [GOOD] >> test_statistics.py::flake8 [GOOD] >> test_streaming_join.py::flake8 [GOOD] >> test_test_connection.py::flake8 [GOOD] >> test_validation.py::flake8 [GOOD] >> test_ydb_over_fq.py::flake8 [GOOD] >> test_yq_v2.py::flake8 [GOOD] |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part7/ydb-tests-fq-yt-kqp_yt_file-part7 |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part4/ydb-tests-fq-yt-kqp_yt_file-part4 |59.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part15/ydb-tests-fq-yt-kqp_yt_file-part15 |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libpy3utils-fetch-proto.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |60.0%| [TS] ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/jemalloc/libcontrib-libs-jemalloc.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |60.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/tests/olap/high_load/read_update_write.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |60.1%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/replication.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/flat_table_part.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/unittests.pb.{h, cc} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... 
grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_datashard.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/error.pb.{h, cc} |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/retry_config.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statistics.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/datastreams.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_mediator_timecast.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/field_transformation.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/ydb_discovery_v1.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/ydb_datastreams_v1.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... 
grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |60.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/blob_range.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/task.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_config.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/event.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/initiator.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/serverless_proxy_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... grpc.pb.h} |60.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/portion_info.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... 
grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/flat_executor.pb.{h, cc} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |60.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/selector.pb.{h, cc} |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/blobs.pb.{h, cc} |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tools/visualize_portions/visualize_portions |60.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |60.3%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server 
|60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/olap_workload |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |60.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |60.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dump_restore/ydb-tests-datashard-dump_restore |59.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/objcopy_c96ef635306ccee8a5cf6359f1.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/action_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/queue_attributes_ut.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ydb-tests-olap |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_join_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/secure_protobuf_printer_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/comp_nodes/ut/dq_hash_combine_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_storage/internal/ut/utils_ut.cpp |59.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/secrets/ydb-tests-functional-secrets |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/dlq_helpers_ut.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/counters_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/base/ut/helpers_ut.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/test_connection/ut/test_connection_ut.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |59.4%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |59.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yt/kqp_yt_import/import_test >> ydb-tests-fq-yt-kqp_yt_import::import_test [GOOD] |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/simple/libcore-cbo-simple.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/tools/yqlrun/yqlrun |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/libessentials-core-url_lister.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/simple_queue/tests/import_test >> ydb-tests-stress-simple_queue-tests::import_test [GOOD] |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp >> visualize_portions::import_test [GOOD] |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/http/ut/xml_builder_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/minidumps/ydb-tests-functional-minidumps |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/yql_facade_run/libessentials-tools-yql_facade_run.a |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_test_functions.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot_event_managers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/kqp_federated_query_actors_ut.cpp |58.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/tools/visualize_portions/import_test >> visualize_portions::import_test [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/queue_id_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/ut/params_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/blob_depot.cpp |58.9%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/file/libqplayer-storage-file.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_testshard/main.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/bindings/libyql-utils-bindings.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_system/libyql-utils-actor_system.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/actors/libproviders-yt-actors.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/dqrun/lib/libtools-dqrun-lib.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/tools/dqrun/dqrun.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/yt/dq_task_preprocessor/libproviders-yt-dq_task_preprocessor.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/testing/group_overseer/libblobstorage-testing-group_overseer.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/comp_defrag.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/memory/libqplayer-storage-memory.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/libessentials-core-url_preprocessing.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.global.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/backpressure.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/http/libtools-yqlrun-http.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/tools/yqlrun/yqlrun.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_size_in_units.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/tools/yqlrun/lib/libtools-yqlrun-lib.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_restart.cpp |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/olap_workload/tests/import_test >> ydb-tests-stress-olap_workload-tests::import_test [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/check_integrity.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/huge.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/self_heal.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/cloud/import_test >> ydb-tests-functional-sqs-cloud::import_test [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/corrupted_reads.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3dc.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/stop_pdisk.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp >> ydb-tests-stress-ctas-tests::import_test [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/counting_events.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/block_race.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/multiget.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/assimilation.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/main.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/defrag.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/extra_block_checks.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/monitoring.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/ctas/tests/import_test >> ydb-tests-stress-ctas-tests::import_test [GOOD] |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/group_reconfiguration.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get_block.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/blobstorage/ut_blobstorage/decommit_3dc.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/get.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/discover.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/index_restore_get.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/mirror3of4.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub_fast.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/main.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/recovery.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/patch.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/ds_proxy_lwtrace.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sanitize_groups.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/vector_index/medium/ydb-tests-datashard-vector_index-medium |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/snapshots.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/shred.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/encryption.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/phantom_blobs.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/validation.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/sync.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/vdisk_malfunction.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/incorrect_queries.cpp >> ydb-tests-fq-restarts::import_test [GOOD] |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/donor.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bsc_cache.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/gc_quorum_3dc.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_selfheal/self_heal_actor_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/tools/combiner_perf/bin/main.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/space_check.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/deadlines.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp >> ydb-tests-fq-mem_alloc::import_test [GOOD] |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/acceleration.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |58.7%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/events.pb.{h, cc} |58.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/mem_alloc/import_test >> ydb-tests-fq-mem_alloc::import_test [GOOD] |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |58.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |59.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |58.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/restarts/import_test >> ydb-tests-fq-restarts::import_test [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp >> ydb-tests-fq-plans::import_test [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |58.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/plans/import_test >> ydb-tests-fq-plans::import_test [GOOD] |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |58.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part18/ydb-tests-fq-yt-kqp_yt_file-part18 |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |57.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/tools/dqrun/dqrun |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a 
|58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/tools/ytrun/lib/libtools-ytrun-lib.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/secret_masker/dummy/liblib-secret_masker-dummy.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_url_lister/libyt-lib-yt_url_lister.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/client/libfmr-coordinator-client.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/interface/libfmr-job-interface.a |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/impl/libfmr-worker-impl.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/proto_helpers/libcoordinator-interface-proto_helpers.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/interface/libcoordinator-yt_coordinator_service-interface.a |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/impl/libfmr-job_factory-impl.a >> ydb-tests-functional-ydb_cli::import_test [GOOD] |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/process/libyt-fmr-process.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/interface/libfmr-coordinator-interface.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/impl/libcoordinator-yt_coordinator_service-impl.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/yt_coordinator_service/file/libcoordinator-yt_coordinator_service-file.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.global.a |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/ydb_cli/import_test >> ydb-tests-functional-ydb_cli::import_test [GOOD] >> ydb-tests-functional-secrets::import_test [GOOD] |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/coordinator/impl/libfmr-coordinator-impl.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/interface/libtable_data_service-discovery-interface.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_launcher/libyt-fmr-job_launcher.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/interface/libfmr-gc_service-interface.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/fmr_tool_lib/libyt-fmr-fmr_tool_lib.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/interface/libfmr-table_data_service-interface.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/gc_service/impl/libfmr-gc_service-impl.a |58.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job_factory/interface/libfmr-job_factory-interface.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/job/impl/libfmr-job-impl.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/utils/libyt-fmr-utils.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/discovery/file/libtable_data_service-discovery-file.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/proto_helpers/libfmr-request_options-proto_helpers.a |58.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/secrets/import_test >> ydb-tests-functional-secrets::import_test [GOOD] |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/request_options/libyt-fmr-request_options.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/proto_helpers/libtable_data_service-client-proto_helpers.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/proto/libyt-fmr-proto.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/client/impl/libtable_data_service-client-impl.a >> ydb-tests-olap::import_test [GOOD] |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/interface/libfmr-yt_job_service-interface.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/worker/interface/libfmr-worker-interface.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/impl/libtable_data_service-local-impl.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/table_data_service/local/interface/libtable_data_service-local-interface.a |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/import_test >> ydb-tests-olap::import_test [GOOD] |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/impl/libfmr-yt_job_service-impl.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/fmr/yt_job_service/file/libfmr-yt_job_service-file.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/fmr/libyt-gateway-fmr.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |58.3%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp >> ydb-tests-stress-node_broker-tests::import_test [GOOD] |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test >> functional-sqs-merge_split_common_table-fifo::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/ydb-tests-sql |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/node_broker/tests/import_test >> ydb-tests-stress-node_broker-tests::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp >> ydb-tests-functional-minidumps::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.3%| [LD] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/string/libstring_udf.so |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |58.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/minidumps/import_test >> ydb-tests-functional-minidumps::import_test [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_b96df764969d83c871c54cf9e5.o |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp >> ydb-tests-sql-large::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp >> ydb-tests-functional-api::import_test [GOOD] |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |58.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/large/import_test >> ydb-tests-sql-large::import_test [GOOD] |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/api/import_test >> ydb-tests-functional-api::import_test [GOOD] |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/links.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} >> ydb-tests-stress-topic_kafka-tests::import_test [GOOD] >> ydb-tests-functional-blobstorage::import_test [GOOD] |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/data.pb.{h, cc} |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/topic_kafka/tests/import_test >> ydb-tests-stress-topic_kafka-tests::import_test [GOOD] |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |58.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/blobstorage/import_test >> ydb-tests-functional-blobstorage::import_test [GOOD] |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_columnshard.{pb.h ... 
grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part6/ydb-tests-fq-yt-kqp_yt_file-part6 |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_hive.{pb.h ... grpc.pb.h} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... 
grpc.pb.h} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp >> ydb-tests-functional-query_cache::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |58.0%| [EN] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/query_cache/import_test >> ydb-tests-functional-query_cache::import_test [GOOD] |58.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} >> ydb-tests-fq-yds::import_test [GOOD] |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/sessions.pb.{h, cc} |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/yds/import_test >> ydb-tests-fq-yds::import_test [GOOD] |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... 
grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut >> ydb-tests-stress-mixedpy-tests::import_test [GOOD] |58.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/storage.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} >> ydb-tests-functional-tenants::import_test [GOOD] >> tests-stress-scheme_board-pile_promotion-tests::import_test [GOOD] |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/ydb-core-kqp-federated_query-ut_service |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/mixedpy/tests/import_test >> ydb-tests-stress-mixedpy-tests::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/main.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_proxy.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/main.cpp |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tenants/import_test >> ydb-tests-functional-tenants::import_test [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pg_ydb_connection.cpp |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/pgwire/pgwire.cpp |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |58.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/scheme_board/pile_promotion/tests/import_test >> tests-stress-scheme_board-pile_promotion-tests::import_test [GOOD] |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/etcd_proxy/proxy.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.global.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a >> ydb-tests-datashard-dump_restore::import_test [GOOD] |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/service/libapps-etcd_proxy-service.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/etcd_proxy/proto/libetcd-grpc.a |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots >> test_partitioning.py::flake8 [GOOD] |58.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/dump_restore/import_test >> ydb-tests-datashard-dump_restore::import_test [GOOD] |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/dq_effects.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |58.2%| [TS] ydb/tests/datashard/partitioning/flake8 >> test_partitioning.py::flake8 [GOOD] |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... 
grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |58.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_tx.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/cursor.pb.{h, cc} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... 
grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/events.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/restart_pdisk.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/data.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/statestorage.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/data_quotas/ydb-tests-olap-data_quotas |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... grpc.pb.h} |58.2%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... 
grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kesus.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... grpc.pb.h} |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/library/recipes/docker_compose/docker_compose |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... 
grpc.pb.h} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |58.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_parser/enum_parser |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |58.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part8/ydb-tests-fq-yt-kqp_yt_file-part8 |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> ydb-tests-datashard-vector_index-medium::import_test [GOOD] |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/cluster_balancing.cpp |61.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/vector_index/medium/import_test >> ydb-tests-datashard-vector_index-medium::import_test [GOOD] |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp >> ydb-tests-stress-viewer-tests::import_test [GOOD] |61.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp >> kqprun_recipe::import_test [GOOD] |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |62.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/viewer/tests/import_test >> ydb-tests-stress-viewer-tests::import_test [GOOD] |63.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/tools/kqprun/recipe/import_test >> kqprun_recipe::import_test [GOOD] |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |64.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |67.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |68.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |68.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |68.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |68.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |68.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |68.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |70.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |73.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |77.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |78.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/scrub.cpp |78.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |78.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp >> ydb-tests-olap-data_quotas::import_test [GOOD] |78.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |78.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |79.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |79.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/data_quotas/import_test >> ydb-tests-olap-data_quotas::import_test [GOOD] |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |79.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |80.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |80.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std >> ydb-tests-sql::import_test [GOOD] |81.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/bridge_get.cpp |81.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/sql/import_test >> ydb-tests-sql::import_test [GOOD] |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/ydb/ydb >> ydb-tests-functional-encryption::import_test [GOOD] |81.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |81.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |82.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/encryption/import_test >> ydb-tests-functional-encryption::import_test [GOOD] |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |82.1%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/bool_test_enums.h_serialized.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |82.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |82.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |82.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |84.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |84.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |84.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |83.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |83.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |83.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/yt/yt/client/libyt-yt-client.a |83.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |83.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |83.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |83.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |83.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |83.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_pdisk.cpp |83.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |83.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/ut/objcopy_caf222d14387d4810b5cb3e853.o |83.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |83.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tools/sql2yql/sql2yql |83.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |83.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |83.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/config_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/compute/common/ut/utils_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |83.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |83.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |83.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/pgwire/pgwire |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |83.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |83.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |83.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |83.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |83.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |83.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |83.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part2/ydb-tests-fq-yt-kqp_yt_file-part2 |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |82.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |82.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |82.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |83.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/mon_ut.cpp |82.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |82.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_ut.cpp |83.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |82.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |82.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |82.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |82.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |82.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |82.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |82.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |82.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime ------- [LD] {default-linux-x86_64, relwithdebinfo} $(B)/yql/tools/yqlrun/yqlrun ld.lld: warning: version script assignment of 'global' to symbol '__after_morecore_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'daylight' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to 
symbol '_environ' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__malloc_initialize_hook' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'program_invocation_short_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'timezone' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'tzname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__libc_start_main' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensAfter' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateHappensBefore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreWritesEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsBegin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'AnnotateIgnoreReadsEnd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'abort' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'accept4' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'asprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'backtrace_symbols' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'bind' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'canonicalize_file_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capget' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'capset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'cfree' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_getres' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_gettime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'clock_settime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__close' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'closedir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'confstr' failed: symbol not defined ld.lld: warning: version script assignment of 
'global' to symbol 'connect' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'creat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctermid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ctime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__cxa_atexit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dl_iterate_phdr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dlopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'drand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'dup3' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endgrent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'endpwent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_create1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_ctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_pwait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'epoll_wait' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_aton_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_hostton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_line' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntoa_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ether_ntohost' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_read' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'eventfd_write' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fclose' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fdopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'fflush' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'flistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fmemopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fopencookie' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fork' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'freopen64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'frexpl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fstatvfs64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ftime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'fwrite' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__fxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getaddrinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'get_current_dir_name' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getcwd' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__getdelim' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyaddr_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname2_r' failed: 
symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostbyname_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gethostent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getifaddrs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getitimer' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getline' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getmntent_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getnameinfo' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpass' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getpeername' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresgid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getresuid' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getsockopt' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gettimeofday' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'getxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'glob64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'gmtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'iconv' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_indextoname' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'if_nametoindex' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_aton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_ntop' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inet_pton' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'initgroups' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'inotify_init1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ioctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_fscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 
'__isoc99_printf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_scanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_snprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_sscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vfscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsnprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsprintf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__isoc99_vsscanf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'kill' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammaf_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgammal_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgamma_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lgetxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listen' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'listxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'llistxattr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'localtime_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'longjmp' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'lrand48_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__lxstat64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsnrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbsrtowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mbstowcs' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memcmp' failed: symbol not defined ld.lld: warning: 
version script assignment of 'global' to symbol 'memcpy' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmem' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memmove' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memrchr' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'memset' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mincore' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mktime' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'mmap64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modf' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modff' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'modfl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlock' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munlockall' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'munmap' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'nanosleep' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_begin_1' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '_obstack_newchunk' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'on_exit' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open64' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'opendir' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_memstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'open_wmemstream' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__overflow' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pipe2' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'poll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'ppoll' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'prctl' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'pread64' failed: symbol 
ld.lld: warning: version script assignment of 'global' to symbol '<symbol>' failed: symbol not defined
(the warning above is emitted repeatedly, for several hundred glibc and runtime symbols, while linking the test binaries; the affected names span the pthread_*, sem_*, sig*, mem*, str*, wcs*, xdr_*, get*/set*, readdir*/scandir*, stat*/statfs* and printf/scanf families, plus individual calls such as read, write, fork, dlopen, mmap, clock_gettime and __tls_get_addr, and the same block of warnings recurs for each link step)
failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_uint8_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_long' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_longlong_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_quad_t' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol 'xdr_u_short' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xpg_strerror_r' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat' failed: symbol not defined ld.lld: warning: version script assignment of 'global' to symbol '__xstat64' failed: symbol not defined |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |81.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |81.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/tools/yqlrun/yqlrun |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |82.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |82.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |82.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |82.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |82.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |82.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |82.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |82.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |82.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |82.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |82.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |82.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |82.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |82.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |82.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |82.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |82.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |82.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |82.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |82.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |82.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |81.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |81.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dqrun/dqrun |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dqrun/dqrun |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |81.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |81.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |81.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |81.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |81.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |81.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/etcd_proxy/etcd_proxy |81.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |81.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |81.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/etcd_proxy/etcd_proxy |81.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |81.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |81.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |81.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |81.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |81.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |81.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |81.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |81.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |81.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |81.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |81.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |81.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |81.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |81.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |81.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/ydb-tests-example |81.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |81.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |81.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |81.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |81.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |81.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |81.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |80.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |81.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |81.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |80.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |80.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |80.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |80.1%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |80.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |78.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |78.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub |73.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |72.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |72.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |72.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |71.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/apps/pgwire/pgwire |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/pgwire/pgwire |71.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |71.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |71.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |70.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |70.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |70.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |70.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |70.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk |69.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |69.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |69.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |69.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |68.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |68.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk |68.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part17/ydb-tests-fq-yt-kqp_yt_file-part17 |67.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |66.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/cdc |66.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |66.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part9/ydb-tests-fq-yt-kqp_yt_file-part9 |64.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |63.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part16/ydb-tests-fq-yt-kqp_yt_file-part16 |63.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |61.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |61.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |61.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |61.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part13/ydb-tests-fq-yt-kqp_yt_file-part13 |61.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |61.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart |61.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part14/ydb-tests-fq-yt-kqp_yt_file-part14 |61.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |60.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |60.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/locks_db.cpp |60.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |60.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |60.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_finish_async.cpp |60.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_start_from_initiator.cpp |59.8%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |59.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |59.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |59.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |59.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |59.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |59.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_subdomain_path_id.cpp |59.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/gc.cpp |59.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |59.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |59.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |59.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpcc/ydb-tests-functional-tpcc |59.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |59.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |58.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |58.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/abstract.cpp |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/ut/ydb-apps-ydb-ut |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |58.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/generator |58.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part10/ydb-tests-fq-yt-kqp_yt_file-part10 |58.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |58.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part0/ydb-tests-fq-yt-kqp_yt_file-part0 |58.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |58.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/ttl.cpp |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |58.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |58.2%| [CC] 
{BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/primary.cpp |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |58.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/delete/ydb-tests-olap-delete |58.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |58.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |58.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write_index.cpp |58.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |58.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part5/ydb-tests-fq-yt-kqp_yt_file-part5 |58.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |57.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/abstract/remove_portions.cpp |57.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/reconfig_state_storage_workload |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/node_broker |57.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |57.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |57.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup_collection/ydb-tests-functional-backup_collection |57.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |57.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |57.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |57.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |57.7%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/abstract.cpp |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |57.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part19/ydb-tests-fq-yt-kqp_yt_file-part19 |57.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_cancel.cpp |57.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/backup.cpp |57.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__write.cpp |57.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__propose_transaction.cpp |57.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/clickbench/ydb-tests-functional-clickbench |57.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/manager/sessions.cpp |57.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |57.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp 
|57.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |57.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_finish_ack_to_source.cpp |57.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |57.0%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp |56.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |56.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |57.0%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |56.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |56.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/inflight_request_tracker.cpp |56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |56.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp |56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |56.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |56.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/reservoir_sampling/libreservoir_sampling_udf.so |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |56.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |56.7%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp |56.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |56.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/session/destination.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/transactions/tx_add_sharding_info.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests 
|56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/binaries/downloader/libpy3compatibility-binaries-downloader.global.a |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |56.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |56.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part1/ydb-tests-fq-yt-kqp_yt_file-part1 |56.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/write.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/ext_tx_base.cpp |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/abstract/gc.cpp >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/reconfig_state_storage_workload/tests/stress-reconfig_state_storage_workload-tests |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/propose_tx.cpp |56.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming/ydb-tests-fq-streaming |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__scan.cpp >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__notify_tx_completion.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard.cpp |56.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent |56.5%| [CC] {default-linux-x86_64, 
relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/simple.cpp |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/storage.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/indexed_blob_constructor.cpp |56.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/write_actor.cpp |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/write.cpp |56.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatchNotSent [GOOD] Test command err: RandomSeed# 8564433158533712430 2025-11-03T14:00:48.502736Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [9:276:20] SessionId# [1:151:8] Cookie# 11933093888780358623 2025-11-03T14:00:48.502763Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 9 SessionId# [1:151:8] Inserted# false Subscription# {SessionId# [1:151:8] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:48.504292Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 9 Cookie# 11933093888780358623 SessionId# [1:151:8] Binding# {9.6/11933093888780358623@[1:151:8]} Record# {RootNodeId: 1 } 2025-11-03T14:00:48.504316Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:366} AbortBinding Binding# {9.6/11933093888780358623@[1:151:8]} Reason# binding cycle 2025-11-03T14:00:48.504327Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:87} AbortAllScatterTasks Binding# {9.6/11933093888780358623@[1:151:8]} 2025-11-03T14:00:48.504341Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 9 Subscription# {SessionId# [1:151:8] SubscriptionCookie# 0} 2025-11-03T14:00:48.504351Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:48.504360Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 6 Binding# {6.0/11933093888780358624@[0:0:0]} SessionId# [0:0:0] 2025-11-03T14:00:48.504388Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [1:133:2] Cookie# 293207500275445729 2025-11-03T14:00:48.504395Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:00:48.504432Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 3 Cookie# 293207500275445729 SessionId# [1:133:2] Binding# {6.0/11933093888780358624@[0:0:0]} Record# {BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> 
VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |56.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/schema.cpp >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] >> BSCReadOnlyPDisk::ReadOnlyOneByOne |56.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnly [GOOD] Test command err: RandomSeed# 6058555862987449464 2025-11-03T14:00:47.992587Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [3:249:20] SessionId# [1:133:2] Cookie# 4165461173062309592 2025-11-03T14:00:47.992616Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:47.992625Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:711} TEvNodeConfigUnbind NodeId# 3 Cookie# 4165461173062309592 SessionId# [1:133:2] Binding# 2025-11-03T14:00:47.992634Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:720} UnbindNode NodeId# 3 Reason# explicit unbind request 2025-11-03T14:00:47.992640Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.6:19001/6 2025-11-03T14:00:47.992647Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.7:19001/7 2025-11-03T14:00:47.992653Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.9:19001/9 2025-11-03T14:00:47.992658Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.4:19001/4 2025-11-03T14:00:47.992664Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.1:19001/1 2025-11-03T14:00:47.992669Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.8:19001/8 2025-11-03T14:00:47.992688Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.2:19001/2 2025-11-03T14:00:47.992694Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.5:19001/5 2025-11-03T14:00:47.992699Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.3:19001/3 2025-11-03T14:00:47.992710Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 3 Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} 2025-11-03T14:00:47.992722Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 5 
2025-11-03T14:00:47.992731Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 3 Binding# {3.0/15653321866632274120@[0:0:0]} SessionId# [0:0:0] 2025-11-03T14:00:47.992752Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [8:101:3] Cookie# 8746419773760078528 2025-11-03T14:00:47.992759Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [8:101:3] Inserted# false Subscription# {SessionId# [8:101:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:47.994378Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 8746419773760078528 SessionId# [8:101:3] Binding# {3.2/8746419773760078528@[8:101:3]} Record# {RootNodeId: 3 } 2025-11-03T14:00:47.994426Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [4:89:3] Cookie# 15984536436331510631 2025-11-03T14:00:47.994440Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:47.994456Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 15984536436331510631 SessionId# [4:89:3] Binding# {3.2/15984536436331510631@[4:89:3]} Record# {RootNodeId: 3 } 2025-11-03T14:00:47.994475Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:249:20] SessionId# [5:92:3] Cookie# 320038556866895417 2025-11-03T14:00:47.994481Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [5:92:3] Inserted# false Subscription# {SessionId# [5:92:3] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:00:47.994490Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 320038556866895417 SessionId# [5:92:3] Binding# {3.2/320038556866895417@[5:92:3]} Record# {RootNodeId: 3 } 2025-11-03T14:00:47.994527Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 131077 Sender# [2:131:1] SessionId# [0:0:0] Cookie# 5 2025-11-03T14:00:47.994536Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:215} TEvNodeConnected NodeId# 1 SessionId# [2:131:1] Cookie# 5 CookieInFlight# true SubscriptionExists# true 2025-11-03T14:00:47.994544Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:251} Continuing bind Binding# {1.0/15868754024624769355@[0:0:0]} 2025-11-03T14:00:47.994559Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [5:263:20] SessionId# [3:91:4] Cookie# 320038556866895417 2025-11-03T14:00:47.994566Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:47.994600Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 5 Cookie# 320038556866895417 SessionId# [3:91:4] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } 
DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 3 2025-11-03T14:00:47.994610Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:47.994617Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.4:19001/4 2025-11-03T14:00:47.994640Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.1:19001/1 2025-11-03T14:00:47.994648Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.8:19001/8 2025-11-03T14:00:47.994653Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.2:19001/2 2025-11-03T14:00:47.994659Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.3:19001/3 2025-11-03T14:00:47.994672Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:47.994680Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 2 Binding# {2.0/4165461173062309593@[0:0:0]} SessionId# [0:0:0] 2025-11-03T14:00:47.994756Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 131077 Sender# [1:133:2] SessionId# [0:0:0] Cookie# 5 2025-11-03T14:00:47.994764Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:215} TEvNodeConnected NodeId# 3 SessionId# [1:133:2] Cookie# 5 CookieInFlight# true SubscriptionExists# true 2025-11-03T14:00:47.994773Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:251} Continuing bind Binding# {3.0/15653321866632274120@[0:0:0]} 2025-11-03T14:00:47.994810Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 131077 Sender# [3:109:2] SessionId# [0:0:0] Cookie# 6 2025-11-03T14:00:47.994818Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:215} TEvNodeConnected NodeId# 2 SessionId# [3:109:2] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-11-03T14:00:47.994824Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:251} Continuing bind Binding# {2.0/4165461173062309593@[0:0:0]} 2025-11-03T14:00:47.994845Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [7:58:5] Cookie# 12590323539245558387 2025-11-03T14:00:47.994853Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [7:58:5] Inserted# false Subscription# {SessionId# [7:58:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:47.994867Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 12590323539245558387 SessionId# [7:58:5] Binding# {5.2/12590323539245558387@[7:58:5]} Record# {RootNodeId: 3 } 2025-11-03T14:00:47.994880Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [9:64:5] Cookie# 16519569055427154222 2025-11-03T14:00:47.994886Z 9 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [9:64:5] Inserted# false Subscription# {SessionId# [9:64:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:47.994894Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 16519569055427154222 SessionId# [9:64:5] Binding# {5.2/16519569055427154222@[9:64:5]} Record# {RootNodeId: 3 } 2025-11-03T14:00:47.994904Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [6:55:5] Cookie# 12657173176014310147 2025-11-03T14:00:47.994910Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [6:55:5] Inserted# false Subscription# {SessionId# [6:55:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:47.994918Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 12657173176014310147 SessionId# [6:55:5] Binding# {5.2/12657173176014310147@[6:55:5]} Record# {RootNodeId: 3 } 2025-11-03T14:00:47.995104Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 15868754024624769355 2025-11-03T14:00:47.995131Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 2 Cookie# 15868754024624769355 SessionId# [1:130:1] Binding# {3.0/15653321866632274120@[1:133:2]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.2" Port: 19001 NodeId: 2 } Meta { Fingerprint: "\3403\207\365\032>> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tables_manager.cpp |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_to_source.cpp |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/bs/write.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSigConfigVersionChanged [GOOD] Test command err: RandomSeed# 11024214687512265808 2025-11-03T14:00:48.263137Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 131077 Sender# [5:115:2] SessionId# [0:0:0] Cookie# 7 2025-11-03T14:00:48.263163Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:215} TEvNodeConnected NodeId# 2 SessionId# [5:115:2] Cookie# 7 CookieInFlight# true SubscriptionExists# true 2025-11-03T14:00:48.263173Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:251} Continuing bind Binding# {2.0/12068275481579747740@[0:0:0]} 2025-11-03T14:00:48.263198Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [5:248:20] SessionId# [3:91:4] Cookie# 12068275481579747739 2025-11-03T14:00:48.263209Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:48.264770Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 5 Cookie# 12068275481579747739 SessionId# [3:91:4] Binding# {1.1/17701296966411438847@[3:134:1]} Record# {DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } 
DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.8" Port: 19001 NodeId: 8 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBoundNodeIds { Host: "127.0.0.3" Port: 19001 NodeId: 3 } } RootNodeId# 1 2025-11-03T14:00:48.264805Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:48.264814Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.7:19001/7 2025-11-03T14:00:48.264821Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.4:19001/4 2025-11-03T14:00:48.264829Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.1:19001/1 2025-11-03T14:00:48.264836Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.8:19001/8 2025-11-03T14:00:48.264842Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.2:19001/2 2025-11-03T14:00:48.264847Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.3:19001/3 2025-11-03T14:00:48.264890Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [7:98:3] Cookie# 14242577342421885419 2025-11-03T14:00:48.264899Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [7:98:3] Inserted# false Subscription# {SessionId# [7:98:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:48.264916Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 14242577342421885419 SessionId# [7:98:3] Binding# {3.5/14242577342421885419@[7:98:3]} Record# {RootNodeId: 1 } 2025-11-03T14:00:48.264932Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [8:101:3] Cookie# 3001816405852126022 2025-11-03T14:00:48.264939Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [8:101:3] Inserted# false Subscription# {SessionId# [8:101:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:48.264948Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 3001816405852126022 SessionId# [8:101:3] Binding# {3.5/3001816405852126022@[8:101:3]} Record# {RootNodeId: 1 } 2025-11-03T14:00:48.264961Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [5:92:3] Cookie# 12068275481579747739 2025-11-03T14:00:48.264973Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 12068275481579747739 SessionId# [5:92:3] Binding# {2.0/12068275481579747740@[5:115:2]} Record# {RootNodeId: 1 } 2025-11-03T14:00:48.265003Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [5:248:20] SessionId# [3:91:4] Cookie# 12068275481579747739 2025-11-03T14:00:48.265009Z 3 00h00m00.000000s :BS_NODE DEBUG: 
{NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:48.265017Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:711} TEvNodeConfigUnbind NodeId# 5 Cookie# 12068275481579747739 SessionId# [3:91:4] Binding# {1.1/17701296966411438847@[3:134:1]} 2025-11-03T14:00:48.265025Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:720} UnbindNode NodeId# 5 Reason# explicit unbind request 2025-11-03T14:00:48.265031Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.6:19001/6 2025-11-03T14:00:48.265037Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.9:19001/9 2025-11-03T14:00:48.265043Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.5:19001/5 2025-11-03T14:00:48.265054Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 5 Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} 2025-11-03T14:00:48.265070Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [9:64:5] Cookie# 12648970896112509817 2025-11-03T14:00:48.265077Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [9:64:5] Inserted# false Subscription# {SessionId# [9:64:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:48.265087Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 12648970896112509817 SessionId# [9:64:5] Binding# {5.1/12648970896112509817@[9:64:5]} Record# {RootNodeId: 5 } 2025-11-03T14:00:48.265100Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [6:55:5] Cookie# 3175954885478706515 2025-11-03T14:00:48.265106Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [6:55:5] Inserted# false Subscription# {SessionId# [6:55:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:48.265114Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 3175954885478706515 SessionId# [6:55:5] Binding# {5.1/3175954885478706515@[6:55:5]} Record# {RootNodeId: 5 } 2025-11-03T14:00:48.265195Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [5:248:20] SessionId# [2:114:4] Cookie# 12068275481579747740 2025-11-03T14:00:48.265234Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 5 Cookie# 12068275481579747740 SessionId# [2:114:4] Binding# {1.1/8183399739677641845@[2:131:1]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032> Record# {DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } } RootNodeId# 1 2025-11-03T14:00:48.265325Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 
2025-11-03T14:00:48.265331Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.4:19001/4 2025-11-03T14:00:48.265337Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.1:19001/1 2025-11-03T14:00:48.265342Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.2:19001/2 2025-11-03T14:00:48.265543Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [1:133:2] Cookie# 17701296966411438847 2025-11-03T14:00:48.265554Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:48.265577Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 3 Cookie# 17701296966411438847 SessionId# [1:133:2] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.6" Port: 19001 NodeId: 6 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } } RootNodeId# 1 2025-11-03T14:00:48.265583Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:00:48.265591Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.6:19001/6 2025-11-03T14:00:48.265597 ... 03T14:00:48.276561Z 1 00h00m03.776870s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:48.276567Z 1 00h00m03.776870s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:48.276573Z 1 00h00m03.776870s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.276580Z 1 00h00m03.776870s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.276586Z 1 00h00m03.776870s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.276592Z 1 00h00m03.776870s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.276597Z 1 00h00m03.776870s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.277522Z 1 00h00m07.901516s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:48.277541Z 1 00h00m07.901516s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:48.277548Z 1 00h00m07.901516s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:48.277554Z 1 
00h00m07.901516s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:48.277560Z 1 00h00m07.901516s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:48.277566Z 1 00h00m07.901516s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:48.277573Z 1 00h00m07.901516s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.277581Z 1 00h00m07.901516s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.277586Z 1 00h00m07.901516s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.277592Z 1 00h00m07.901516s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.277600Z 1 00h00m07.901516s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.278376Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-11-03T14:00:48.278394Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:00:48.278403Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:48.278409Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:00:48.278415Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:00:48.278420Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:00:48.278432Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:48.278441Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:48.278448Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:48.278458Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] 
CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:48.278469Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 10 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:48.278474Z 1 00h00m10.002048s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=10 Info->ClusterStateGuid=0 clusterStateGuid=0 2025-11-03T14:00:48.278489Z 1 00h00m10.002048s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 10 ClusterStateGuid: 0 2025-11-03T14:00:48.279879Z 1 00h00m16.233300s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:48.279901Z 1 00h00m16.233300s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:48.279908Z 1 00h00m16.233300s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:48.279913Z 1 00h00m16.233300s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:48.279919Z 1 00h00m16.233300s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:48.279925Z 1 00h00m16.233300s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:48.279935Z 1 00h00m16.233300s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.279943Z 1 00h00m16.233300s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.279949Z 1 00h00m16.233300s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.279955Z 1 00h00m16.233300s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.279960Z 1 00h00m16.233300s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.283608Z 1 00h00m32.896868s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:48.283635Z 1 00h00m32.896868s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:48.283645Z 1 00h00m32.896868s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:48.283651Z 1 00h00m32.896868s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:48.283657Z 1 00h00m32.896868s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:48.283663Z 1 00h00m32.896868s :STATESTORAGE DEBUG: Replica::Handle 
ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:48.283671Z 1 00h00m32.896868s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.283679Z 1 00h00m32.896868s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.283685Z 1 00h00m32.896868s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.283691Z 1 00h00m32.896868s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.283697Z 1 00h00m32.896868s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.291457Z 1 00h01m07.890360s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:48.291494Z 1 00h01m07.890360s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:48.291505Z 1 00h01m07.890360s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:48.291511Z 1 00h01m07.890360s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:48.291517Z 1 00h01m07.890360s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:48.291523Z 1 00h01m07.890360s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:48.291532Z 1 00h01m07.890360s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.291545Z 1 00h01m07.890360s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.291551Z 1 00h01m07.890360s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.291557Z 1 00h01m07.890360s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:48.291563Z 1 00h01m07.890360s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_start_source_cursor.cpp |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |56.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyNotifyReplicaConfigChanged1 [GOOD] Test command err: RandomSeed# 5632332250949759820 2025-11-03T14:00:49.888911Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [6:95:3] Cookie# 883510239394622925 2025-11-03T14:00:49.888939Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [6:95:3] Inserted# false Subscription# {SessionId# [6:95:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:49.890479Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 883510239394622925 SessionId# [6:95:3] Binding# {3.1/883510239394622925@[6:95:3]} Record# {RootNodeId: 7 } 2025-11-03T14:00:49.890524Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [9:83:4] Cookie# 14150860348610915827 2025-11-03T14:00:49.890536Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [9:83:4] Inserted# false Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:49.890550Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 14150860348610915827 SessionId# [9:83:4] Binding# {4.1/14150860348610915827@[9:83:4]} Record# {RootNodeId: 7 } 2025-11-03T14:00:49.890564Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [5:71:4] Cookie# 6087394466108442433 2025-11-03T14:00:49.890571Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [5:71:4] Inserted# false Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:00:49.890580Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 6087394466108442433 SessionId# [5:71:4] Binding# {4.1/6087394466108442433@[5:71:4]} Record# {RootNodeId: 7 } 2025-11-03T14:00:49.890821Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [8:61:5] Cookie# 13615461472658474272 2025-11-03T14:00:49.890833Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [8:61:5] Inserted# false Subscription# {SessionId# [8:61:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:49.890846Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 13615461472658474272 SessionId# [8:61:5] Binding# {5.1/13615461472658474272@[8:61:5]} Record# {RootNodeId: 7 } 2025-11-03T14:00:49.892207Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.892227Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.892233Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.892240Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.892246Z 1 00h00m00.000000s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.892873Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639257 Sender# [1:310:42] SessionId# [0:0:0] Cookie# 0 2025-11-03T14:00:49.892948Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.892974Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 10999778678444910106 2025-11-03T14:00:49.892985Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:49.893009Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 1 Cookie# 10999778678444910106 SessionId# [7:146:1] Binding# Record# {CacheUpdate { } } RootNodeId# 7 2025-11-03T14:00:49.893017Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:49.893036Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 17631028317084675340 2025-11-03T14:00:49.893043Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:49.893058Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 17631028317084675340 SessionId# [2:131:1] Binding# {1.7/17631028317084675340@[2:131:1]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893077Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:49.893085Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:49.893091Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:49.893097Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:49.893102Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:49.893122Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.893130Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.893136Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.893142Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 
ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.893148Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.893169Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [7:262:20] SessionId# [1:145:6] Cookie# 10999778678444910106 2025-11-03T14:00:49.893176Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 7 SessionId# [1:145:6] Inserted# false Subscription# {SessionId# [1:145:6] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-11-03T14:00:49.893187Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 7 Cookie# 10999778678444910106 SessionId# [1:145:6] Binding# {7.7/10999778678444910106@[1:145:6]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893201Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 6764614390803071534 2025-11-03T14:00:49.893207Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:00:49.893218Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 6764614390803071534 SessionId# [3:109:2] Binding# {2.7/6764614390803071534@[3:109:2]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893231Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [4:112:2] Cookie# 11253356641842606868 2025-11-03T14:00:49.893237Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-11-03T14:00:49.893245Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 11253356641842606868 SessionId# [4:112:2] Binding# {2.7/11253356641842606868@[4:112:2]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893293Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 17631028317084675340 2025-11-03T14:00:49.893299Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:00:49.893308Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 17631028317084675340 SessionId# [2:131:1] Binding# {1.7/17631028317084675340@[2:131:1]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893323Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [6:95:3] Cookie# 883510239394622925 2025-11-03T14:00:49.893329Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [6:95:3] Inserted# false Subscription# {SessionId# [6:95:3] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:49.893338Z 6 00h00m00.002048s :BS_NODE DEBUG: 
{NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 883510239394622925 SessionId# [6:95:3] Binding# {3.7/883510239394622925@[6:95:3]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893350Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [9:83:4] Cookie# 14150860348610915827 2025-11-03T14:00:49.893356Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [9:83:4] Inserted# false Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:00:49.893365Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 14150860348610915827 SessionId# [9:83:4] Binding# {4.7/14150860348610915827@[9:83:4]} Record# {RootNodeId: 7 CacheUpdate { } } 2025-11-03T14:00:49.893377Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [5:71:4] Cookie# 6087394466108442433 2025-11-03T14:00:49.893383Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [5:71:4] Inserted# false Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:00:49.893391Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 6087394466108442433 SessionId# [5:71:4] Binding# {4.7/6087394466108442433@[5:71 ... ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.904740Z 1 00h00m15.466986s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.904760Z 1 00h00m15.466986s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:49.904768Z 1 00h00m15.466986s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:49.904774Z 1 00h00m15.466986s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:49.904780Z 1 00h00m15.466986s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:49.904786Z 1 00h00m15.466986s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:49.904793Z 1 00h00m15.466986s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.904801Z 1 00h00m15.466986s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.904810Z 1 00h00m15.466986s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.904816Z 1 00h00m15.466986s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.904822Z 1 
00h00m15.466986s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.906007Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.906026Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:00:49.906034Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.906039Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:00:49.906044Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:00:49.906050Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:00:49.906060Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.906070Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.906077Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.908450Z 1 00h00m30.162241s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.908473Z 1 00h00m30.162241s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:00:49.908482Z 1 00h00m30.162241s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.908488Z 1 00h00m30.162241s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 msgGeneration=0 Info->ClusterStateGuid=2 msgGuid=0 2025-11-03T14:00:49.908499Z 1 00h00m30.162241s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:00:49.908505Z 1 00h00m30.162241s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:00:49.908510Z 1 00h00m30.162241s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:00:49.908520Z 1 00h00m30.162241s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.908532Z 1 00h00m30.162241s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 
ClusterStateGeneration: 0 ClusterStateGuid: 0 2025-11-03T14:00:49.908544Z 1 00h00m30.162241s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 2 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.908550Z 1 00h00m30.162241s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=0 Info->ClusterStateGuid=0 clusterStateGuid=2 2025-11-03T14:00:49.908560Z 1 00h00m30.162241s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 0 ClusterStateGuid: 2 2025-11-03T14:00:49.909066Z 1 00h00m32.449683s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.909086Z 1 00h00m32.449683s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:00:49.909093Z 1 00h00m32.449683s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:00:49.909099Z 1 00h00m32.449683s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:00:49.909105Z 1 00h00m32.449683s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:00:49.909111Z 1 00h00m32.449683s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:00:49.909118Z 1 00h00m32.449683s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.909124Z 1 00h00m32.449683s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.909130Z 1 00h00m32.449683s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.909135Z 1 00h00m32.449683s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.909141Z 1 00h00m32.449683s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:00:49.910954Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.910976Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:00:49.910985Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.910991Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:00:49.910997Z 1 00h00m40.200000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:00:49.911003Z 1 00h00m40.200000s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:00:49.911013Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.911022Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.911032Z 1 00h00m40.200000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.913310Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:00:49.913331Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:00:49.913340Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:00:49.913346Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:00:49.913350Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:00:49.913357Z 1 00h00m50.300000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:00:49.913367Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.913377Z 1 00h00m50.300000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:00:49.913383Z 1 00h00m50.300000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=0 2025-11-03T14:00:49.913398Z 1 00h00m50.300000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 0 |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__plan_step.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc_HugeBlob [GOOD] Test command err: RandomSeed# 9774370661613146595 SEND TEvPut with key [1:1:1:0:0:533504:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:533504:0] 2025-11-03T14:00:50.153539Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: 
TEvPutResult {Id# [1:1:2:0:0:533504:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_data_ack_to_source.cpp |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |56.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/write_actor.cpp |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |56.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/sync.cpp |56.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/transfer |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp |56.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |56.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp >> CheckIntegrityBlock42::DataErrorAdditionalUnequalParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken >> CheckIntegrityMirror3dc::PlacementOkWithErrors >> CheckIntegrityBlock42::DataErrorSixPartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp >> BSCReadOnlyPDisk::ReadOnlySlay >> CheckIntegrityMirror3dc::PlacementBlobIsLost >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob >> CheckIntegrityMirror3dc::PlacementOkWithErrors [GOOD] >> CheckIntegrityBlock42::DataErrorFivePartsOneBroken [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a >> CheckIntegrityMirror3dc::PlacementBlobIsLost [GOOD] >> CheckIntegrityMirror3dc::PlacementDisintegrated >> CheckIntegrityMirror3dc::PlacementOkWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityMirror3of4::PlacementBlobIsLost |56.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a >> CheckIntegrityMirror3dc::PlacementDisintegrated [GOOD] >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] >> CheckIntegrityMirror3dc::DataOk >> CheckIntegrityMirror3dc::PlacementOk |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__locks.cpp |56.3%| [TM] 
{default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp >> CheckIntegrityMirror3dc::DataOk [GOOD] >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_portions.cpp >> CheckIntegrityMirror3dc::PlacementOk [GOOD] |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_ack_from_initiator.cpp |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> CheckIntegrityMirror3dc::PlacementOkHandoff >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] >> CheckIntegrityMirror3dc::PlacementOkHandoff [GOOD] |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementBlobIsLost [GOOD] Test command err: RandomSeed# 13021606362008802615 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** >> CheckIntegrityMirror3dc::PlacementMissingParts >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 3369734484796664226 SEND TEvPut with key [1:1:1:0:0:3201024:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:3201024:0] 2025-11-03T14:00:52.958128Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:304:64] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-11-03T14:00:52.958185Z 8 00h01m00.010512s :BS_NODE 
ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:309:69] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2025-11-03T14:00:52.958212Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:307:67] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-11-03T14:00:52.958237Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:306:66] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2025-11-03T14:00:52.958266Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:305:65] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-11-03T14:00:52.958291Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:303:63] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-11-03T14:00:52.958347Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:308:68] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |56.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataOk [GOOD] Test command err: RandomSeed# 6391811758199130524 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ] |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard.cpp |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp |56.3%| [AR] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlySlay [GOOD] Test command err: RandomSeed# 10221548394071661038 2025-11-03T14:00:52.854829Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:52.855104Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7759306511202062152] 2025-11-03T14:00:52.855859Z 1 00h01m14.361536s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> ClusterBalancing::ClusterBalancingEvenDistribution ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::PlacementMissingParts [GOOD] Test command err: RandomSeed# 16142007490610362506 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] |56.3%| RESOURCE $(sbr:770480022) - 3.16 MB >> CheckIntegrityMirror3of4::PlacementOk >> VDiskBalancing::TestStopOneNode_Mirror3dc |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |56.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__overload.cpp >> CheckIntegrityMirror3of4::PlacementOk [GOOD] >> FormatTimes::DurationUs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistributionNotPossible [GOOD] Test command err: RandomSeed# 15912673608155472801 2025-11-03T14:00:54.541388Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 
2025-11-03T14:00:54.541657Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2250718220687994772] 2025-11-03T14:00:54.542194Z 5 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.551226Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.551487Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4459682593093667300] 2025-11-03T14:00:54.551963Z 4 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.561076Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.561349Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2789391214597265644] 2025-11-03T14:00:54.561810Z 2 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:1:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.615451Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.615703Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 9528350836361894621] 2025-11-03T14:00:54.616119Z 8 00h01m16.004608s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:7:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.625414Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.625675Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17690210927587270315] 2025-11-03T14:00:54.626098Z 3 00h01m17.005120s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.635593Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.635859Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 11516963145882402226] 2025-11-03T14:00:54.636299Z 6 00h01m18.005632s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.692614Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.692911Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: 
(2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3290192689377939841] 2025-11-03T14:00:54.693350Z 1 00h01m31.006144s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:54.703028Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:54.703306Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2501502775416748361] 2025-11-03T14:00:54.703728Z 7 00h01m32.006656s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a >> CheckIntegrityMirror3of4::PlacementMissingParts >> CheckIntegrityMirror3of4::PlacementMissingParts [GOOD] >> CheckIntegrityMirror3of4::PlacementDisintegrated |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/ctas >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |56.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |56.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationUs [GOOD] |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/security/ydb-tests-functional-security |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/sharing.cpp >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] |56.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |56.3%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose |56.3%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/compaction.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3of4::PlacementDisintegrated [GOOD] Test command err: RandomSeed# 5125239467922666446 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB 
[72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** Group is disintegrated or has network problems |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__statistics.cpp |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/pack_builder.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest >> ClusterBalancing::ClusterBalancingEvenDistribution [GOOD] Test command err: RandomSeed# 12955967592711811180 2025-11-03T14:00:55.552905Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.553199Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 944922886432900545] 2025-11-03T14:00:55.553789Z 3 00h01m01.003072s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:2:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.563861Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.564147Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 16274689912459326551] 2025-11-03T14:00:55.564686Z 8 00h01m02.003584s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000002:_:0:7:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.574388Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.574680Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 208258986097909247] 2025-11-03T14:00:55.575127Z 6 00h01m03.004096s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:5:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.591430Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.591723Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17906231263766489648] 2025-11-03T14:00:55.592229Z 2 00h01m04.004608s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.652825Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.653105Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 18138643645669898965] 2025-11-03T14:00:55.653529Z 7 00h01m16.005120s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000001:_:0:6:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.663971Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) 
TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.664281Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 118061197131221646] 2025-11-03T14:00:55.664740Z 3 00h01m17.005632s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:2:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.674838Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.675108Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17127093659954233118] 2025-11-03T14:00:55.675573Z 5 00h01m18.006144s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000003:_:0:4:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.691681Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.691948Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2461839119932972284] 2025-11-03T14:00:55.692403Z 1 00h01m19.006656s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.759469Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.759744Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15489531333179172336] 2025-11-03T14:00:55.760167Z 2 00h01m31.007168s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:1:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.770785Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.771046Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 6925011740438436977] 2025-11-03T14:00:55.771473Z 5 00h01m32.007680s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:4:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.782056Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.782316Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 7468855405172331994] 2025-11-03T14:00:55.782789Z 1 00h01m33.008192s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:0:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.798311Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.798579Z 4 
00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3074784985548275864] 2025-11-03T14:00:55.799045Z 4 00h01m34.008704s :BS_SYNCER ERROR: PDiskId# 1002 VDISK[82000000:_:0:3:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.862106Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.862410Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 10543488356751713893] 2025-11-03T14:00:55.862866Z 4 00h01m46.009216s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000001:_:0:3:0]: (2181038081) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.873774Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.874040Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 3294311725990827158] 2025-11-03T14:00:55.874499Z 6 00h01m47.009728s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000002:_:0:5:0]: (2181038082) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.885601Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.885885Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15251149285566618053] 2025-11-03T14:00:55.886367Z 8 00h01m48.010240s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000003:_:0:7:0]: (2181038083) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 2025-11-03T14:00:55.902904Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:00:55.903209Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2790290738967002931] 2025-11-03T14:00:55.903670Z 7 00h01m49.010752s :BS_SYNCER ERROR: PDiskId# 1003 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Mirror3dc [GOOD] Test command err: RandomSeed# 13522846099596213065 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-11-03T14:00:55.688868Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/tpch/ydb-tests-functional-tpc-medium-tpch |56.3%| [AR] {BAZEL_UPLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |56.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/general_compaction.cpp |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |56.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |56.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication >> StatsFormat::FullStat [GOOD] >> FormatTimes::ParseDuration [GOOD] >> BSCRestartPDisk::RestartOneByOne |56.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartBrokenDiskInBrokenGroup [GOOD] Test command err: RandomSeed# 14208141021163612013 2025-11-03T14:00:56.242024Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242048Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242054Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242059Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242065Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242070Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242076Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242081Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242261Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242285Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242298Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242311Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242323Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242335Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242348Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242361Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.242375Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242380Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242387Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242391Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242395Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242399Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242403Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242407Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:56.242994Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243017Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got 
TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243028Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243041Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243090Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243100Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243110Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:56.243119Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |56.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp |56.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_impl.cpp |56.2%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/docker-compose >> BSCRestartPDisk::RestartOneByOneWithReconnects >> SelfHealActorTest::SingleErrorDisk [GOOD] >> BsControllerTest::SelfHealMirror3dc >> BsControllerTest::SelfHealBlock4Plus2 |56.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |56.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::FullStat [GOOD] |56.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::ParseDuration [GOOD] >> BSCRestartPDisk::RestartNotAllowed >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 |56.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__init.cpp |56.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::SingleErrorDisk [GOOD] |56.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a >> BsControllerTest::DecommitRejected [GOOD] |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest |56.2%| [CC] {BAZEL_UPLOAD} 
$(S)/ydb/core/tx/columnshard/data_sharing/modification/tasks/modification.cpp >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 >> VDiskBalancing::TestRandom_Block42 |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] >> CheckIntegrityBlock42::PlacementWrongDisks >> FormatTimes::DurationMs [GOOD] >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed >> CheckIntegrityBlock42::PlacementBlobIsLost |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> SelfHealActorTest::NoMoreThanOneReplicating [GOOD] |56.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp |56.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |56.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part2/ydb-library-yql-tests-sql-dq_file-part2 >> CheckIntegrityBlock42::PlacementWrongDisks [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/suite_tests/ydb-tests-functional-suite_tests |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest |56.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so >> CheckIntegrityBlock42::PlacementBlobIsLost [GOOD] >> CheckIntegrityBlock42::PlacementAllOnHandoff >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] >> CheckIntegrityMirror3dc::DataErrorOneCopy [GOOD] >> CheckIntegrityMirror3dc::DataErrorManyCopies >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] >> CheckIntegrityBlock42::PlacementOk >> BSCRestartPDisk::RestartNotAllowed [GOOD] >> CheckIntegrityBlock42::PlacementAllOnHandoff [GOOD] >> CheckIntegrityBlock42::PlacementDisintegrated >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> FormatTimes::DurationMs [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42 [GOOD] Test command err: RandomSeed# 6328785966037984001 SEND TEvPut with key [1:1:1:0:0:100:0] 2025-11-03T14:00:58.964058Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-11-03T14:00:58.964163Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-11-03T14:00:58.987788Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> CheckIntegrityBlock42::PlacementDisintegrated [GOOD] >> CheckIntegrityBlock42::DataStatusUnknown >> Config::IncludeScope [GOOD] >> CheckIntegrityBlock42::PlacementOk [GOOD] >> CheckIntegrityBlock42::PlacementOkHandoff >> Config::ExcludeScope [GOOD] >> CheckIntegrityBlock42::PlacementOkWithErrors >> CheckIntegrityBlock42::DataOk >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyOneByOne [GOOD] Test command err: RandomSeed# 16103841924286822192 |56.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/modification/transactions/tx_change_blobs_owning.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartGoodDiskInBrokenGroupNotAllowed [GOOD] Test command err: RandomSeed# 2739758954598102467 2025-11-03T14:00:59.252225Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252259Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252268Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252276Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252284Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252292Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252300Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252579Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.252623Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.252640Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.252656Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.252672Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.252686Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some 
error reason Marker# BSVSF03 2025-11-03T14:00:59.252703Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.252721Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252729Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252736Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252752Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252763Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252770Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.252777Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:00:59.253265Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.253276Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.253284Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.253295Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.253303Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.253312Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:00:59.253319Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 
|56.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a >> CheckIntegrityBlock42::PlacementOkHandoff [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartNotAllowed [GOOD] Test command err: RandomSeed# 7147316348207729574 |56.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard__progress_tx.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityMirror3dc::DataErrorManyCopies [GOOD] Test command err: RandomSeed# 351625571561146496 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 ], ver1 disks [ 2 ] ERROR: There are unequal parts *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:1:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:2:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:1:0:0] 1: [82000000:1:2:0:0] 2: [82000000:1:0:0:0] 3: [82000000:1:1:1:0] 4: [82000000:1:2:1:0] 5: [82000000:1:0:1:0] 6: [82000000:1:1:2:0] 7: [82000000:1:2:2:0] 8: [82000000:1:0:2:0] Layout info: ver0 disks [ 0 1 2 ], ver1 disks [ 3 4 5 ] ERROR: There are unequal parts |56.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::IncludeScope [GOOD] |56.1%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a >> CheckIntegrityBlock42::PlacementMissingParts >> CheckIntegrityBlock42::PlacementOkWithErrors [GOOD] |56.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/abstract/abstract.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestDontSendToReadOnlyTest_Block42 [GOOD] Test command err: RandomSeed# 8741652291536954857 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:100:0] 2025-11-03T14:00:59.425198Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:6355:837] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Start compaction Finish compaction >> CheckIntegrityBlock42::DataOk [GOOD] >> VDiskRestart::Simple [GOOD] >> Donor::SlayAfterWiping >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks >> Donor::ConsistentWritesWhenSwitchingToDonorMode >> CheckIntegrityBlock42::DataOkAdditionalEqualParts >> CheckIntegrityBlock42::PlacementWithErrorsOnBlobDisks [GOOD] >> CheckIntegrityBlock42::PlacementStatusUnknown >> CheckIntegrityBlock42::DataOkAdditionalEqualParts [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken |56.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> Config::ExcludeScope [GOOD] |56.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/session/source.cpp |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> BsControllerTest::SelfHealMirror3dc [GOOD] >> Donor::SlayAfterWiping [GOOD] >> Donor::CheckOnlineReadRequestToDonor >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] |56.0%| [EN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |56.0%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |56.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> VDiskRestart::Simple [GOOD] |56.0%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/library/recipes/docker_compose/bin/{recipes-docker_compose-bin.final.pkg.fake ... 
library/recipes/docker_compose/bin/docker-compose} >> Donor::MultipleEvicts >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob >> StatsFormat::AggregateStat [GOOD] >> CheckIntegrityBlock42::DataErrorSixPartsTwoBroken [GOOD] >> CheckIntegrityBlock42::DataOkErasureFiveParts |56.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementMissingParts [GOOD] Test command err: RandomSeed# 10595166576524248336 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so >> Donor::SkipBadDonor >> Donor::MultipleEvicts [GOOD] |55.9%| RESOURCE $(sbr:4966407557) |55.9%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a >> Donor::CheckOnlineReadRequestToDonor [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataStatusUnknown [GOOD] Test command err: RandomSeed# 17498664729098686057 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] 
TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Group is disintegrated or has network problems *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: part 2: part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |55.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/read_metadata.cpp |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |55.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_internal_scan.cpp >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] |55.9%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/compute/common/ut/unittest >> StatsFormat::AggregateStat [GOOD] >> Donor::SkipBadDonor [GOOD] |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |56.0%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/transaction/tx_scan.cpp |56.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> TStateStorageRingGroupState::TestStateStorageDoubleReply ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SlayAfterWiping [GOOD] Test command err: RandomSeed# 18251194272816658352 2025-11-03T14:01:01.128062Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:01.128472Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2147287961883736519] 2025-11-03T14:01:01.129312Z 1 00h01m14.361024s 
:BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> BridgeGet::PartRestorationAcrossBridgeOnRange ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::DecommitRejected [GOOD] Test command err: 2025-11-03T14:00:58.757135Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-11-03T14:00:58.757158Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-11-03T14:00:58.757176Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-11-03T14:00:58.757180Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-11-03T14:00:58.757188Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-11-03T14:00:58.757192Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-11-03T14:00:58.757199Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-11-03T14:00:58.757204Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-11-03T14:00:58.757211Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-11-03T14:00:58.757215Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-11-03T14:00:58.757225Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-11-03T14:00:58.757229Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-11-03T14:00:58.757235Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-11-03T14:00:58.757241Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-11-03T14:00:58.757247Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-11-03T14:00:58.757251Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-11-03T14:00:58.757257Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-11-03T14:00:58.757262Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-11-03T14:00:58.757269Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-11-03T14:00:58.757275Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-11-03T14:00:58.757283Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-11-03T14:00:58.757287Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-11-03T14:00:58.757293Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-11-03T14:00:58.757298Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-11-03T14:00:58.757310Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-11-03T14:00:58.757314Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-11-03T14:00:58.757320Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-11-03T14:00:58.757325Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-11-03T14:00:58.757331Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-11-03T14:00:58.757336Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-11-03T14:00:58.760155Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:514:38] Status# ERROR ClientId# [1:514:38] ServerId# [0:0:0] PipeClient# [1:514:38] 2025-11-03T14:00:58.760302Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:515:20] Status# ERROR ClientId# [2:515:20] ServerId# [0:0:0] PipeClient# [2:515:20] 2025-11-03T14:00:58.760313Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:516:20] Status# ERROR ClientId# [3:516:20] ServerId# [0:0:0] PipeClient# [3:516:20] 2025-11-03T14:00:58.760322Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:517:20] Status# ERROR ClientId# [4:517:20] ServerId# [0:0:0] PipeClient# [4:517:20] 2025-11-03T14:00:58.760330Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:518:20] Status# ERROR ClientId# [5:518:20] ServerId# [0:0:0] PipeClient# [5:518:20] 
2025-11-03T14:00:58.760339Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:519:20] Status# ERROR ClientId# [6:519:20] ServerId# [0:0:0] PipeClient# [6:519:20] 2025-11-03T14:00:58.760346Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:520:20] Status# ERROR ClientId# [7:520:20] ServerId# [0:0:0] PipeClient# [7:520:20] 2025-11-03T14:00:58.760353Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:521:20] Status# ERROR ClientId# [8:521:20] ServerId# [0:0:0] PipeClient# [8:521:20] 2025-11-03T14:00:58.760360Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:522:20] Status# ERROR ClientId# [9:522:20] ServerId# [0:0:0] PipeClient# [9:522:20] 2025-11-03T14:00:58.760368Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:523:20] Status# ERROR ClientId# [10:523:20] ServerId# [0:0:0] PipeClient# [10:523:20] 2025-11-03T14:00:58.760378Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:524:20] Status# ERROR ClientId# [11:524:20] ServerId# [0:0:0] PipeClient# [11:524:20] 2025-11-03T14:00:58.760385Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:525:20] Status# ERROR ClientId# [12:525:20] ServerId# [0:0:0] PipeClient# [12:525:20] 2025-11-03T14:00:58.760402Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:526:20] Status# ERROR ClientId# [13:526:20] ServerId# [0:0:0] PipeClient# [13:526:20] 2025-11-03T14:00:58.760409Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:527:20] Status# ERROR ClientId# [14:527:20] ServerId# [0:0:0] PipeClient# [14:527:20] 2025-11-03T14:00:58.760416Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:528:20] Status# ERROR ClientId# [15:528:20] ServerId# [0:0:0] PipeClient# [15:528:20] 2025-11-03T14:00:58.768526Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] Connect 2025-11-03T14:00:58.768549Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] Connect 2025-11-03T14:00:58.768559Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] Connect 2025-11-03T14:00:58.768576Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] Connect 2025-11-03T14:00:58.768603Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] Connect 2025-11-03T14:00:58.768612Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] Connect 2025-11-03T14:00:58.768622Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] Connect 2025-11-03T14:00:58.768629Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] Connect 2025-11-03T14:00:58.768637Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] Connect 2025-11-03T14:00:58.768646Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] Connect 2025-11-03T14:00:58.768654Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] Connect 2025-11-03T14:00:58.768662Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] Connect 2025-11-03T14:00:58.768677Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] Connect 2025-11-03T14:00:58.768686Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] Connect 2025-11-03T14:00:58.768694Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] Connect 2025-11-03T14:00:58.769359Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:589:66] Status# OK ClientId# [1:589:66] ServerId# [1:618:67] PipeClient# [1:589:66] 2025-11-03T14:00:58.769371Z 1 00h00m00.100000s :BS_NODE DEBUG: [1] State switched from 0 to 1 2025-11-03T14:00:58.770410Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:590:21] Status# OK ClientId# [2:590:21] ServerId# [1:619:68] PipeClient# [2:590:21] 2025-11-03T14:00:58.770425Z 2 00h00m00.100000s :BS_NODE DEBUG: [2] State switched from 0 to 1 2025-11-03T14:00:58.770434Z 3 00h00m00.100000s :BS_NODE DEBUG: 
[3] ClientConnected Sender# [3:591:21] Status# OK ClientId# [3:591:21] ServerId# [1:620:69] PipeClient# [3:591:21] 2025-11-03T14:00:58.770439Z 3 00h00m00.100000s :BS_NODE DEBUG: [3] State switched from 0 to 1 2025-11-03T14:00:58.770446Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:592:21] Status# OK ClientId# [4:592:21] ServerId# [1:621:70] PipeClient# [4:592:21] 2025-11-03T14:00:58.770451Z 4 00h00m00.100000s :BS_NODE DEBUG: [4] State switched from 0 to 1 2025-11-03T14:00:58.770458Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:593:21] Status# OK ClientId# [5:593:21] ServerId# [1:622:71] PipeClient# [5:593:21] 2025-11-03T14:00:58.770464Z 5 00h00m00.100000s :BS_NODE DEBUG: [5] State switched from 0 to 1 2025-11-03T14:00:58.770471Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:594:21] Status# OK ClientId# [6:594:21] ServerId# [1:623:72] PipeClient# [6:594:21] 2025-11-03T14:00:58.770476Z 6 00h00m00.100000s :BS_NODE DEBUG: [6] State switched from 0 to 1 2025-11-03T14:00:58.770483Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:595:21] Status# OK ClientId# [7:595:21] ServerId# [1:624:73] PipeClient# [7:595:21] 2025-11-03T14:00:58.770488Z 7 00h00m00.100000s :BS_NODE DEBUG: [7] State switched from 0 to 1 2025-11-03T14:00:58.770497Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:596:21] Status# OK ClientId# [8:596:21] ServerId# [1:625:74] PipeClient# [8:596:21] 2025-11-03T14:00:58.770503Z 8 00h00m00.100000s :BS_NODE DEBUG: [8] State switched from 0 to 1 2025-11-03T14:00:58.770510Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:597:21] Status# OK ClientId# [9:597:21] ServerId# [1:626:75] PipeClient# [9:597:21] 2025-11-03T14:00:58.770514Z 9 00h00m00.100000s :BS_NODE DEBUG: [9] State switched from 0 to 1 2025-11-03T14:00:58.770522Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:598:21] Status# OK ClientId# [10:598:21] ServerId# [1:627:76] PipeClient# [10:598:21] 2025-11-03T14:00:58.770527Z 10 00h00m00.100000s :BS_NODE DEBUG: [10] State switched from 0 to 1 2025-11-03T14:00:58.770534Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:599:21] Status# OK ClientId# [11:599:21] ServerId# [1:628:77] PipeClient# [11:599:21] 2025-11-03T14:00:58.770539Z 11 00h00m00.100000s :BS_NODE DEBUG: [11] State switched from 0 to 1 2025-11-03T14:00:58.770546Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:600:21] Status# OK ClientId# [12:600:21] ServerId# [1:629:78] PipeClient# [12:600:21] 2025-11-03T14:00:58.770552Z 12 00h00m00.100000s :BS_NODE DEBUG: [12] State switched from 0 to 1 2025-11-03T14:00:58.770558Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:601:21] Status# OK ClientId# [13:601:21] ServerId# [1:630:79] PipeClient# [13:601:21] 2025-11-03T14:00:58.770562Z 13 00h00m00.100000s :BS_NODE DEBUG: [13] State switched from 0 to 1 2025-11-03T14:00:58.770568Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:602:21] Status# OK ClientId# [14:602:21] ServerId# [1:631:80] PipeClient# [14:602:21] 2025-11-03T14:00:58.770573Z 14 00h00m00.100000s :BS_NODE DEBUG: [14] State switched from 0 to 1 2025-11-03T14:00:58.770580Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:603:21] Status# OK ClientId# [15:603:21] ServerId# [1:632:81] PipeClient# [15:603:21] 2025-11-03T14:00:58.770585Z 15 00h00m00.100000s :BS_NODE DEBUG: [15] State switched from 0 to 1 2025-11-03T14:00:58.771058Z 1 
00h00m00.100512s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:00:58.771073Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-11-03T14:00:58.774937Z 1 00h00m00.100512s :BS_NODE DEBUG: [1] VDiskId# [80000000:1:0:0:0] status changed to INIT_PENDING 2025-11-03T14:00:58.775247Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-11-03T14:00:58.775262Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-11-03T14:00:58.775277Z 2 00h00m00.100512s :BS_NODE DEBUG: [2] VDiskId# [80000000:1:0:1:0] status changed to INIT_PENDING 2025-11-03T14:00:58.775297Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-11-03T14:00:58.775304Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] PDiskId# 1000 VSlotId# 1000 created 2025-11-03T14:00:58.775312Z 3 00h00m00.100512s :BS_NODE DEBUG: [3] VDiskId# [80000000:1:0:2:0] status changed to INIT_PENDING 2025-11-03T14:00:58.775329Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-11-03T14:00:58.775334Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] PDiskId# 1000 VSlotId# 1000 created 2025-11-03T14:00:58.775342Z 4 00h00m00.100512s :BS_NODE DEBUG: [4] VDiskId# [80000000:1:1:0:0] status changed to INIT_PENDING 2025-11-03T14:00:58.775371Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-11-03T14:00:58.775378Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] PDiskId# 1000 VSlotId# 1000 created 2025-11-03T14:00:58.775385Z 5 00h00m00.100512s :BS_NODE DEBUG: [5] VDiskId# [80000000:1:1:1:0] status changed to INIT_PENDING 2025-11-03T1 ... [80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.846926Z 7 00h01m15.658536s :BS_NODE DEBUG: [7] NodeServiceSetUpdate 2025-11-03T14:00:58.846936Z 7 00h01m15.658536s :BS_NODE DEBUG: [7] VDiskId# [80000000:1:2:0:0] destroyed 2025-11-03T14:00:58.846970Z 11 00h01m16.593512s :BS_NODE DEBUG: [11] VDiskId# [80000001:1:0:1:0] status changed to READY 2025-11-03T14:00:58.847023Z 1 00h01m16.593512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847071Z 1 00h01m20.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847112Z 1 00h01m22.702512s :BS_NODE DEBUG: [1] VDiskId# [80000001:1:2:0:0] status changed to READY 2025-11-03T14:00:58.847165Z 1 00h01m22.702512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] 
NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847201Z 10 00h01m23.096512s :BS_NODE DEBUG: [10] VDiskId# [80000001:1:0:0:0] status changed to READY 2025-11-03T14:00:58.847253Z 1 00h01m23.096512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847325Z 1 00h01m25.306512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847353Z 12 00h01m25.431512s :BS_NODE DEBUG: [12] VDiskId# [80000001:1:0:2:0] status changed to READY 2025-11-03T14:00:58.847408Z 1 00h01m25.431512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847437Z 14 00h01m25.879512s :BS_NODE DEBUG: [14] VDiskId# [80000001:1:1:1:0] status changed to READY 2025-11-03T14:00:58.847500Z 1 00h01m25.879512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847534Z 2 00h01m27.315512s :BS_NODE DEBUG: [2] VDiskId# [80000001:1:2:1:0] status changed to READY 2025-11-03T14:00:58.847604Z 1 00h01m27.315512s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847672Z 1 00h01m30.000000s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] NotReady},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847732Z 1 00h01m30.658024s :BS_SELFHEAL INFO: {BSSH11@self_heal.cpp:719} group can't be reassigned right now [{[80000000:3:0:0:0] Ready},{[80000000:3:0:1:0] Ready},{[80000000:3:0:2:0] Ready},{[80000000:3:1:0:0] Ready},{[80000000:3:1:1:0] Ready},{[80000000:3:1:2:0] Ready},{[80000000:3:2:0:0] Ready},{[80000000:3:2:1:0] NotReady},{[80000000:3:2:2:0] Ready 
RequiresReassignment Decommitted}] GroupId# 2147483648 2025-11-03T14:00:58.847789Z 14 00h01m30.835536s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] status changed to READY 2025-11-03T14:00:58.847862Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483648 2025-11-03T14:00:58.848007Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848018Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:0:0] DiskIsOk# true 2025-11-03T14:00:58.848066Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848073Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:1:0] DiskIsOk# true 2025-11-03T14:00:58.848080Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848087Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:0:2:0] DiskIsOk# true 2025-11-03T14:00:58.848094Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848101Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:0:0] DiskIsOk# true 2025-11-03T14:00:58.848108Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848115Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:1:0] DiskIsOk# true 2025-11-03T14:00:58.848122Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848128Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:1:2:0] DiskIsOk# true 2025-11-03T14:00:58.848136Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848142Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:0:0] DiskIsOk# true 2025-11-03T14:00:58.848149Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483648 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:58.848155Z 1 00h01m30.835536s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483648 VDiskId# [80000000:3:2:1:0] DiskIsOk# true 2025-11-03T14:00:58.848646Z 1 00h01m30.836048s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:00:58.848660Z 1 00h01m30.836048s :BS_NODE DEBUG: [1] VDiskId# [80000000:3:0:0:0] -> [80000000:4:0:0:0] 2025-11-03T14:00:58.848788Z 1 00h01m30.836048s 
:BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483648 Items# [80000000:3:2:2:0]: 9:1000:1000 -> 15:1000:1001 ConfigTxSeqNo# 23 2025-11-03T14:00:58.848796Z 1 00h01m30.836048s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483648 Success# true 2025-11-03T14:00:58.848820Z 8 00h01m30.836048s :BS_NODE DEBUG: [8] NodeServiceSetUpdate 2025-11-03T14:00:58.848827Z 8 00h01m30.836048s :BS_NODE DEBUG: [8] VDiskId# [80000000:2:2:1:0] destroyed 2025-11-03T14:00:58.848844Z 2 00h01m30.836048s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-11-03T14:00:58.848853Z 2 00h01m30.836048s :BS_NODE DEBUG: [2] VDiskId# [80000000:3:0:1:0] -> [80000000:4:0:1:0] 2025-11-03T14:00:58.848868Z 3 00h01m30.836048s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-11-03T14:00:58.848875Z 3 00h01m30.836048s :BS_NODE DEBUG: [3] VDiskId# [80000000:3:0:2:0] -> [80000000:4:0:2:0] 2025-11-03T14:00:58.848890Z 4 00h01m30.836048s :BS_NODE DEBUG: [4] NodeServiceSetUpdate 2025-11-03T14:00:58.848898Z 4 00h01m30.836048s :BS_NODE DEBUG: [4] VDiskId# [80000000:3:1:0:0] -> [80000000:4:1:0:0] 2025-11-03T14:00:58.848912Z 5 00h01m30.836048s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-11-03T14:00:58.848920Z 5 00h01m30.836048s :BS_NODE DEBUG: [5] VDiskId# [80000000:3:1:1:0] -> [80000000:4:1:1:0] 2025-11-03T14:00:58.848935Z 6 00h01m30.836048s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-11-03T14:00:58.848943Z 6 00h01m30.836048s :BS_NODE DEBUG: [6] VDiskId# [80000000:3:1:2:0] -> [80000000:4:1:2:0] 2025-11-03T14:00:58.848954Z 9 00h01m30.836048s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-11-03T14:00:58.848968Z 13 00h01m30.836048s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-11-03T14:00:58.848976Z 13 00h01m30.836048s :BS_NODE DEBUG: [13] VDiskId# [80000000:3:2:0:0] -> [80000000:4:2:0:0] 2025-11-03T14:00:58.848991Z 14 00h01m30.836048s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:00:58.848999Z 14 00h01m30.836048s :BS_NODE DEBUG: [14] VDiskId# [80000000:3:2:1:0] -> [80000000:4:2:1:0] 2025-11-03T14:00:58.849014Z 15 00h01m30.836048s :BS_NODE DEBUG: [15] NodeServiceSetUpdate 2025-11-03T14:00:58.849021Z 15 00h01m30.836048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] PDiskId# 1000 VSlotId# 1001 created 2025-11-03T14:00:58.849032Z 15 00h01m30.836048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to INIT_PENDING 2025-11-03T14:00:58.849289Z 15 00h01m32.455048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to REPLICATING 2025-11-03T14:00:58.849410Z 15 00h01m35.957512s :BS_NODE DEBUG: [15] VDiskId# [80000001:1:1:2:0] status changed to READY 2025-11-03T14:00:58.849537Z 15 00h01m38.048048s :BS_NODE DEBUG: [15] VDiskId# [80000000:4:2:2:0] status changed to READY 2025-11-03T14:00:58.849728Z 9 00h01m38.048560s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-11-03T14:00:58.849740Z 9 00h01m38.048560s :BS_NODE DEBUG: [9] VDiskId# [80000000:3:2:2:0] destroyed 2025-11-03T14:00:58.849849Z 13 00h01m40.208512s :BS_NODE DEBUG: [13] VDiskId# [80000001:1:1:0:0] status changed to READY |55.9%| [EN] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |55.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/actor/export_actor.cpp |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::MultipleEvicts [GOOD] Test command err: RandomSeed# 12288556893483646640 0 donors: 2025-11-03T14:01:02.054497Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.054565Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.055573Z 26 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-11-03T14:01:02.069192Z 24 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.069256Z 24 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.070080Z 24 00h00m20.012048s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-11-03T14:01:02.083428Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.083490Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.084335Z 26 00h00m20.013072s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-11-03T14:01:02.098359Z 24 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.098426Z 24 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.099295Z 24 00h00m20.014096s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-11-03T14:01:02.113962Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.114029Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.114886Z 26 00h00m20.015120s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-11-03T14:01:02.128954Z 24 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.129018Z 24 00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.129854Z 24 
00h00m20.016144s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-11-03T14:01:02.143968Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.144036Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.144935Z 26 00h00m20.017168s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 2025-11-03T14:01:02.159370Z 24 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.159441Z 24 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.160347Z 24 00h00m20.018192s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 26:1000 2025-11-03T14:01:02.174655Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.174728Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 15933784129882376563] 2025-11-03T14:01:02.175602Z 26 00h00m20.019216s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:2:1:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 1 donors: 24:1000 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::PlacementStatusUnknown [GOOD] Test command err: RandomSeed# 5786789166706747574 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO 
[82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::SkipBadDonor [GOOD] Test command err: RandomSeed# 6701577025615239 2025-11-03T14:01:02.563780Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:02.564198Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 13462723730857610233] 2025-11-03T14:01:02.565351Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 >> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] |55.9%| [SB] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql |55.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/common/session/common.cpp |56.0%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part11/ydb-tests-fq-yt-kqp_yt_file-part11 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TwoPartsOnOneNodeTest_Block42_HugeBlob [GOOD] Test command err: RandomSeed# 12269512128061328363 SEND TEvPut with key [1:1:1:0:0:3201024:0] 2025-11-03T14:01:02.071930Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-11-03T14:01:02.072031Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:3201024:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 Node 4: Node 5: Node 6: 2 Node 7: 3 2025-11-03T14:01:02.090867Z 1 00h01m00.011024s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 7 Node 0: 4 Node 1: 5 Node 2: 6 Node 3: 1 2 Node 4: Node 5: 1 Node 6: Node 7: 3 Start compaction 1 Finish compaction 1 >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::CheckOnlineReadRequestToDonor [GOOD] Test command err: RandomSeed# 4975142414197337247 2025-11-03T14:01:01.693649Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:01.693899Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 4803729996989226741] 2025-11-03T14:01:01.694543Z 7 00h01m11.311024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:0:0:0:2097152:1] 2025-11-03T14:01:01.694571Z 7 00h01m11.311024s 
:BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:6:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 1 PartsResurrected# 1 |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/show_create_view |55.9%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_file/part12/ydb-tests-fq-yt-kqp_yt_file-part12 |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] |55.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/constructor.cpp >> BridgeGet::PartRestorationAcrossBridgeOnDiscover >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |55.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOneWithReconnects [GOOD] Test command err: RandomSeed# 4528028970189771545 |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |55.9%| [UN] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/functional/postgresql/psql/psql ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/unittest >> BSCRestartPDisk::RestartOneByOne [GOOD] Test command err: RandomSeed# 4083292261212641132 |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::SelfHealBlock4Plus2 [GOOD] Test command err: 2025-11-03T14:00:58.019915Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-11-03T14:00:58.019939Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-11-03T14:00:58.019967Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-11-03T14:00:58.019975Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-11-03T14:00:58.019981Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-11-03T14:00:58.019985Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-11-03T14:00:58.019992Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-11-03T14:00:58.019996Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-11-03T14:00:58.020003Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-11-03T14:00:58.020007Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-11-03T14:00:58.020014Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-11-03T14:00:58.020018Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-11-03T14:00:58.020024Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 
2025-11-03T14:00:58.020028Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-11-03T14:00:58.020035Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-11-03T14:00:58.020040Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-11-03T14:00:58.020053Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-11-03T14:00:58.020058Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-11-03T14:00:58.020065Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-11-03T14:00:58.020070Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-11-03T14:00:58.020077Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-11-03T14:00:58.020081Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-11-03T14:00:58.020087Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-11-03T14:00:58.020092Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-11-03T14:00:58.020099Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-11-03T14:00:58.020104Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-11-03T14:00:58.020110Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-11-03T14:00:58.020115Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-11-03T14:00:58.020122Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-11-03T14:00:58.020127Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-11-03T14:00:58.020133Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-11-03T14:00:58.020138Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-11-03T14:00:58.020144Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-11-03T14:00:58.020148Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-11-03T14:00:58.020155Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-11-03T14:00:58.020159Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-11-03T14:00:58.020166Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-11-03T14:00:58.020171Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-11-03T14:00:58.020177Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-11-03T14:00:58.020182Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-11-03T14:00:58.020188Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-11-03T14:00:58.020192Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-11-03T14:00:58.020198Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-11-03T14:00:58.020202Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-11-03T14:00:58.020209Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-11-03T14:00:58.020213Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-11-03T14:00:58.020220Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-11-03T14:00:58.020225Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-11-03T14:00:58.020237Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-11-03T14:00:58.020241Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-11-03T14:00:58.020247Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-11-03T14:00:58.020252Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-11-03T14:00:58.020258Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-11-03T14:00:58.020264Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-11-03T14:00:58.020271Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-11-03T14:00:58.020275Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-11-03T14:00:58.020281Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-11-03T14:00:58.020285Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-11-03T14:00:58.020292Z 30 00h00m00.000000s 
:BS_NODE DEBUG: [30] Bootstrap 2025-11-03T14:00:58.020296Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-11-03T14:00:58.020302Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-11-03T14:00:58.020307Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-11-03T14:00:58.020312Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-11-03T14:00:58.020316Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-11-03T14:00:58.024614Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2163:55] Status# ERROR ClientId# [1:2163:55] ServerId# [0:0:0] PipeClient# [1:2163:55] 2025-11-03T14:00:58.024928Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2164:37] Status# ERROR ClientId# [2:2164:37] ServerId# [0:0:0] PipeClient# [2:2164:37] 2025-11-03T14:00:58.024942Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2165:37] Status# ERROR ClientId# [3:2165:37] ServerId# [0:0:0] PipeClient# [3:2165:37] 2025-11-03T14:00:58.024950Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2166:37] Status# ERROR ClientId# [4:2166:37] ServerId# [0:0:0] PipeClient# [4:2166:37] 2025-11-03T14:00:58.024959Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2167:37] Status# ERROR ClientId# [5:2167:37] ServerId# [0:0:0] PipeClient# [5:2167:37] 2025-11-03T14:00:58.024966Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2168:37] Status# ERROR ClientId# [6:2168:37] ServerId# [0:0:0] PipeClient# [6:2168:37] 2025-11-03T14:00:58.024974Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2169:37] Status# ERROR ClientId# [7:2169:37] ServerId# [0:0:0] PipeClient# [7:2169:37] 2025-11-03T14:00:58.024981Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2170:37] Status# ERROR ClientId# [8:2170:37] ServerId# [0:0:0] PipeClient# [8:2170:37] 2025-11-03T14:00:58.024988Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2171:37] Status# ERROR ClientId# [9:2171:37] ServerId# [0:0:0] PipeClient# [9:2171:37] 2025-11-03T14:00:58.024996Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2172:37] Status# ERROR ClientId# [10:2172:37] ServerId# [0:0:0] PipeClient# [10:2172:37] 2025-11-03T14:00:58.025003Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2173:37] Status# ERROR ClientId# [11:2173:37] ServerId# [0:0:0] PipeClient# [11:2173:37] 2025-11-03T14:00:58.025011Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2174:37] Status# ERROR ClientId# [12:2174:37] ServerId# [0:0:0] PipeClient# [12:2174:37] 2025-11-03T14:00:58.025017Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2175:37] Status# ERROR ClientId# [13:2175:37] ServerId# [0:0:0] PipeClient# [13:2175:37] 2025-11-03T14:00:58.025028Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2176:37] Status# ERROR ClientId# [14:2176:37] ServerId# [0:0:0] PipeClient# [14:2176:37] 2025-11-03T14:00:58.025035Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2177:37] Status# ERROR ClientId# [15:2177:37] ServerId# [0:0:0] PipeClient# [15:2177:37] 2025-11-03T14:00:58.025041Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2178:37] Status# ERROR ClientId# [16:2178:37] ServerId# [0:0:0] PipeClient# [16:2178:37] 2025-11-03T14:00:58.025048Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2179:37] Status# ERROR ClientId# [17:2179:37] ServerId# [0:0:0] PipeClient# [17:2179:37] 
2025-11-03T14:00:58.025055Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2180:37] Status# ERROR ClientId# [18:2180:37] ServerId# [0:0:0] PipeClient# [18:2180:37] 2025-11-03T14:00:58.025063Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2181:37] Status# ERROR ClientId# [19:2181:37] ServerId# [0:0:0] PipeClient# [19:2181:37] 2025-11-03T14:00:58.025070Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2182:37] Status# ERROR ClientId# [20:2182:37] ServerId# [0:0:0] PipeClient# [20:2182:37] 2025-11-03T14:00:58.025078Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2183:37] Status# ERROR ClientId# [21:2183:37] ServerId# [0:0:0] PipeClient# [21:2183:37] 2025-11-03T14:00:58.025085Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2184:37] Status# ERROR ClientId# [22:2184:37] ServerId# [0:0:0] PipeClient# [22:2184:37] 2025-11-03T14:00:58.025092Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2185:37] Status# ERROR ClientId# [23:2185:37] ServerId# [0:0:0] PipeClient# [23:2185:37] 2025-11-03T14:00:58.025103Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2186:37] Status# ERROR ClientId# [24:2186:37] ServerId# [0:0:0] PipeClient# [24:2186:37] 2025-11-03T14:00:58.025110Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2187:37] Status# ERROR ClientId# [25:2187:37] ServerId# [0:0:0] PipeClient# [25:2187:37] 2025-11-03T14:00:58.025118Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2188:37] Status# ERROR ClientId# [26:2188:37] ServerId# [0:0:0] PipeClient# [26:2188:37] 2025-11-03T14:00:58.025125Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2189:37] Status# ERROR ClientId# [27:2189:37] ServerId# [0:0:0] PipeClient# [27:2189:37] 2025-11-03T14:00:58.025133Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2190:37] Status# ERROR ClientId# [28:2190:37] ServerId# [0:0:0] PipeClient# [28:2190:37] 2025-11-03T14:00:58.025141Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2191:37] Status# ERROR ClientId# [29:2191:37] ServerId# [0:0:0] PipeClient# [29:2191:37] 2025-11-03T14:00:58.025149Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2192:37] Status# ERROR ClientId# [30:2192:37] ServerId# [0:0:0] PipeClient# [30:2192:37] 2025-11-03T14:00:58.025157Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2193:37] Status# ERROR ClientId# [31:2193:37] ServerId# [0:0:0] PipeClient# [31:2193:37] 2025-11-03T14:00:58.025165Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2194:37] Status# ERROR ClientId# [32:2194:37] ServerId# [0:0:0] PipeClient# [32:2194:37] 2025-11-03T14:00:58.059944Z 1 00h00m00.002560s :BS_NODE DEBUG: [1] CheckState from [1:2264:79] expected 1 current 0 2025-11-03T14:00:58.059975Z 2 00h00m00.002560s :BS_NODE DEBUG: [2] CheckState from [2:2265:38] expected 1 current 0 2025-11-03T14:00:58.059983Z 3 00h00m00.002560s :BS_NODE DEBUG: [3] CheckState from [3:2266:38] expected 1 current 0 2025-11-03T14:00:58.059990Z 4 00h00m00.002560s :BS_NODE DEBUG: [4] CheckState from [4:2267:38] expected 1 current 0 2025-11-03T14:00:58.059998Z 5 00h00m00.002560s :BS_NODE DEBUG: [5] CheckState from [5:2268:38] expected 1 current 0 2025-11-03T14:00:58.060029Z 6 00h00m00.002560s :BS_NODE DEBUG: [6] CheckState from [6:2269:38] expected 1 current 0 2025-11-03T14:00:58.060044Z 7 00h00m00.002560s :BS_NODE DEBUG: [7] 
CheckState from [7:2270:38] expected 1 current 0 2025-11-03T14:00:58.060052Z 8 00h00m00.002560s :BS_NODE DEBUG: [8] CheckState from [8:2271:38] expected 1 current 0 2025-11-03T14:00:58.060060Z 9 00h00m00.002560s :BS_NODE DEBUG: [9] CheckState from [9:2272:38] expected 1 current 0 2025-11-03T14:00:58.060067Z 10 00h00m00.002560s :BS_NODE DEBUG: [10] CheckState from [10:2273 ... FHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483703 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.830589Z 1 05h15m00.121504s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483703 VDiskId# [80000037:3:0:7:0] DiskIsOk# true 2025-11-03T14:00:59.833079Z 1 05h15m00.122016s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483703 Items# [80000037:3:0:5:0]: 21:1000:1009 -> 5:1000:1017 ConfigTxSeqNo# 512 2025-11-03T14:00:59.833096Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483703 Success# true 2025-11-03T14:00:59.833129Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-11-03T14:00:59.833142Z 17 05h15m00.122016s :BS_NODE DEBUG: [17] VDiskId# [80000037:3:0:0:0] -> [80000037:4:0:0:0] 2025-11-03T14:00:59.833160Z 21 05h15m00.122016s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.833178Z 5 05h15m00.122016s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-11-03T14:00:59.833187Z 5 05h15m00.122016s :BS_NODE DEBUG: [5] VDiskId# [80000037:4:0:5:0] PDiskId# 1000 VSlotId# 1017 created 2025-11-03T14:00:59.833201Z 5 05h15m00.122016s :BS_NODE DEBUG: [5] VDiskId# [80000037:4:0:5:0] status changed to INIT_PENDING 2025-11-03T14:00:59.833218Z 26 05h15m00.122016s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-11-03T14:00:59.833227Z 26 05h15m00.122016s :BS_NODE DEBUG: [26] VDiskId# [80000037:3:0:1:0] -> [80000037:4:0:1:0] 2025-11-03T14:00:59.833244Z 27 05h15m00.122016s :BS_NODE DEBUG: [27] NodeServiceSetUpdate 2025-11-03T14:00:59.833253Z 27 05h15m00.122016s :BS_NODE DEBUG: [27] VDiskId# [80000037:3:0:2:0] -> [80000037:4:0:2:0] 2025-11-03T14:00:59.833268Z 28 05h15m00.122016s :BS_NODE DEBUG: [28] NodeServiceSetUpdate 2025-11-03T14:00:59.833277Z 28 05h15m00.122016s :BS_NODE DEBUG: [28] VDiskId# [80000037:3:0:3:0] -> [80000037:4:0:3:0] 2025-11-03T14:00:59.833292Z 29 05h15m00.122016s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-11-03T14:00:59.833301Z 29 05h15m00.122016s :BS_NODE DEBUG: [29] VDiskId# [80000037:3:0:4:0] -> [80000037:4:0:4:0] 2025-11-03T14:00:59.833316Z 31 05h15m00.122016s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-11-03T14:00:59.833323Z 31 05h15m00.122016s :BS_NODE DEBUG: [31] VDiskId# [80000037:3:0:6:0] -> [80000037:4:0:6:0] 2025-11-03T14:00:59.833338Z 32 05h15m00.122016s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-11-03T14:00:59.833347Z 32 05h15m00.122016s :BS_NODE DEBUG: [32] VDiskId# [80000037:3:0:7:0] -> [80000037:4:0:7:0] 2025-11-03T14:00:59.833424Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483706 2025-11-03T14:00:59.833643Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833655Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:0:0] DiskIsOk# true 2025-11-03T14:00:59.833663Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} 
Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833669Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:1:0] DiskIsOk# true 2025-11-03T14:00:59.833676Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833682Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:2:0] DiskIsOk# true 2025-11-03T14:00:59.833688Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833694Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:3:0] DiskIsOk# true 2025-11-03T14:00:59.833701Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833707Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:5:0] DiskIsOk# true 2025-11-03T14:00:59.833714Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833720Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:6:0] DiskIsOk# true 2025-11-03T14:00:59.833730Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483706 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:00:59.833736Z 1 05h15m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483706 VDiskId# [8000003a:5:0:7:0] DiskIsOk# true 2025-11-03T14:00:59.836458Z 1 05h15m00.122528s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483706 Items# [8000003a:5:0:4:0]: 21:1000:1007 -> 11:1001:1017 ConfigTxSeqNo# 513 2025-11-03T14:00:59.836475Z 1 05h15m00.122528s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483706 Success# true 2025-11-03T14:00:59.836510Z 18 05h15m00.122528s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:00:59.836523Z 18 05h15m00.122528s :BS_NODE DEBUG: [18] VDiskId# [8000003a:5:0:1:0] -> [8000003a:6:0:1:0] 2025-11-03T14:00:59.836543Z 19 05h15m00.122528s :BS_NODE DEBUG: [19] NodeServiceSetUpdate 2025-11-03T14:00:59.836552Z 19 05h15m00.122528s :BS_NODE DEBUG: [19] VDiskId# [8000003a:5:0:2:0] -> [8000003a:6:0:2:0] 2025-11-03T14:00:59.836567Z 21 05h15m00.122528s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.836600Z 22 05h15m00.122528s :BS_NODE DEBUG: [22] NodeServiceSetUpdate 2025-11-03T14:00:59.836610Z 22 05h15m00.122528s :BS_NODE DEBUG: [22] VDiskId# [8000003a:5:0:5:0] -> [8000003a:6:0:5:0] 2025-11-03T14:00:59.836629Z 24 05h15m00.122528s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-11-03T14:00:59.836638Z 24 05h15m00.122528s :BS_NODE DEBUG: [24] VDiskId# [8000003a:5:0:7:0] -> [8000003a:6:0:7:0] 2025-11-03T14:00:59.836657Z 25 05h15m00.122528s :BS_NODE DEBUG: [25] NodeServiceSetUpdate 2025-11-03T14:00:59.836667Z 25 
05h15m00.122528s :BS_NODE DEBUG: [25] VDiskId# [8000003a:5:0:6:0] -> [8000003a:6:0:6:0] 2025-11-03T14:00:59.836687Z 11 05h15m00.122528s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-11-03T14:00:59.836697Z 11 05h15m00.122528s :BS_NODE DEBUG: [11] VDiskId# [8000003a:6:0:4:0] PDiskId# 1001 VSlotId# 1017 created 2025-11-03T14:00:59.836711Z 11 05h15m00.122528s :BS_NODE DEBUG: [11] VDiskId# [8000003a:6:0:4:0] status changed to INIT_PENDING 2025-11-03T14:00:59.836731Z 13 05h15m00.122528s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-11-03T14:00:59.836740Z 13 05h15m00.122528s :BS_NODE DEBUG: [13] VDiskId# [8000003a:5:0:0:0] -> [8000003a:6:0:0:0] 2025-11-03T14:00:59.836758Z 32 05h15m00.122528s :BS_NODE DEBUG: [32] NodeServiceSetUpdate 2025-11-03T14:00:59.836768Z 32 05h15m00.122528s :BS_NODE DEBUG: [32] VDiskId# [8000003a:5:0:3:0] -> [8000003a:6:0:3:0] 2025-11-03T14:00:59.837040Z 5 05h15m01.426480s :BS_NODE DEBUG: [5] VDiskId# [80000022:5:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.837160Z 5 05h15m02.152504s :BS_NODE DEBUG: [5] VDiskId# [80000002:5:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.837242Z 5 05h15m02.177992s :BS_NODE DEBUG: [5] VDiskId# [80000012:7:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.837322Z 5 05h15m02.484456s :BS_NODE DEBUG: [5] VDiskId# [80000020:6:0:5:0] status changed to REPLICATING 2025-11-03T14:00:59.837427Z 5 05h15m03.327920s :BS_NODE DEBUG: [5] VDiskId# [8000002a:6:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.837563Z 5 05h15m04.159944s :BS_NODE DEBUG: [5] VDiskId# [8000000a:5:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.837707Z 5 05h15m04.167016s :BS_NODE DEBUG: [5] VDiskId# [80000037:4:0:5:0] status changed to REPLICATING 2025-11-03T14:00:59.837840Z 5 05h15m04.397432s :BS_NODE DEBUG: [5] VDiskId# [8000001a:5:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.837972Z 11 05h15m04.954528s :BS_NODE DEBUG: [11] VDiskId# [8000003a:6:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.838527Z 5 05h15m05.388968s :BS_NODE DEBUG: [5] VDiskId# [80000032:5:0:4:0] status changed to REPLICATING 2025-11-03T14:00:59.839048Z 5 05h15m10.760480s :BS_NODE DEBUG: [5] VDiskId# [80000022:5:0:4:0] status changed to READY 2025-11-03T14:00:59.840401Z 21 05h15m10.760992s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.840420Z 21 05h15m10.760992s :BS_NODE DEBUG: [21] VDiskId# [80000022:4:0:4:0] destroyed 2025-11-03T14:00:59.840860Z 5 05h15m16.712968s :BS_NODE DEBUG: [5] VDiskId# [80000032:5:0:4:0] status changed to READY 2025-11-03T14:00:59.842195Z 21 05h15m16.713480s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.842212Z 21 05h15m16.713480s :BS_NODE DEBUG: [21] VDiskId# [80000032:4:0:4:0] destroyed 2025-11-03T14:00:59.842247Z 5 05h15m16.756016s :BS_NODE DEBUG: [5] VDiskId# [80000037:4:0:5:0] status changed to READY 2025-11-03T14:00:59.843525Z 21 05h15m16.756528s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.843543Z 21 05h15m16.756528s :BS_NODE DEBUG: [21] VDiskId# [80000037:3:0:5:0] destroyed 2025-11-03T14:00:59.843588Z 5 05h15m19.404944s :BS_NODE DEBUG: [5] VDiskId# [8000000a:5:0:4:0] status changed to READY 2025-11-03T14:00:59.844877Z 21 05h15m19.405456s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.844893Z 21 05h15m19.405456s :BS_NODE DEBUG: [21] VDiskId# [8000000a:4:0:4:0] destroyed 2025-11-03T14:00:59.845332Z 5 05h15m28.170504s :BS_NODE DEBUG: [5] VDiskId# [80000002:5:0:4:0] status changed to READY 2025-11-03T14:00:59.846653Z 21 05h15m28.171016s :BS_NODE 
DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.846669Z 21 05h15m28.171016s :BS_NODE DEBUG: [21] VDiskId# [80000002:4:0:4:0] destroyed 2025-11-03T14:00:59.846709Z 5 05h15m29.834920s :BS_NODE DEBUG: [5] VDiskId# [8000002a:6:0:4:0] status changed to READY 2025-11-03T14:00:59.848078Z 21 05h15m29.835432s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.848094Z 21 05h15m29.835432s :BS_NODE DEBUG: [21] VDiskId# [8000002a:5:0:4:0] destroyed 2025-11-03T14:00:59.848557Z 5 05h15m30.059456s :BS_NODE DEBUG: [5] VDiskId# [80000020:6:0:5:0] status changed to READY 2025-11-03T14:00:59.849979Z 21 05h15m30.059968s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.849994Z 21 05h15m30.059968s :BS_NODE DEBUG: [21] VDiskId# [80000020:5:0:5:0] destroyed 2025-11-03T14:00:59.850158Z 5 05h15m33.068992s :BS_NODE DEBUG: [5] VDiskId# [80000012:7:0:4:0] status changed to READY 2025-11-03T14:00:59.851547Z 21 05h15m33.069504s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.851564Z 21 05h15m33.069504s :BS_NODE DEBUG: [21] VDiskId# [80000012:6:0:4:0] destroyed 2025-11-03T14:00:59.851985Z 5 05h15m35.938432s :BS_NODE DEBUG: [5] VDiskId# [8000001a:5:0:4:0] status changed to READY 2025-11-03T14:00:59.853522Z 21 05h15m35.938944s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.853537Z 21 05h15m35.938944s :BS_NODE DEBUG: [21] VDiskId# [8000001a:4:0:4:0] destroyed 2025-11-03T14:00:59.853580Z 11 05h15m38.506528s :BS_NODE DEBUG: [11] VDiskId# [8000003a:6:0:4:0] status changed to READY 2025-11-03T14:00:59.854931Z 21 05h15m38.507040s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:00:59.854946Z 21 05h15m38.507040s :BS_NODE DEBUG: [21] VDiskId# [8000003a:5:0:4:0] destroyed |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/secondary_index/ydb-tests-datashard-secondary_index |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/split_merge/ydb-tests-datashard-split_merge |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] >> TStateStorageRingGroupState::TestBoardConfigMismatch |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |55.9%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/export/session/session.cpp |55.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] >> 
BSCMovePDisk::PDiskMove_Block42 [GOOD] >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] |55.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridge [GOOD] Test command err: RandomSeed# 5027567120637331812 originalGroupIndex# 0 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 0 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 1 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 1 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 originalGroupIndex# 2 indexOnly# 0 mustRestoreFirst# 1 *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 *** performing bridge get *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |55.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/writer/buffer/actor2.cpp |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnceWriteOnlyReverseEventsOrder [GOOD] Test command err: RandomSeed# 14347628182463535599 2025-11-03T14:01:06.282069Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:242:20] SessionId# [1:130:1] Cookie# 12620356791158840077 2025-11-03T14:01:06.282098Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-11-03T14:01:06.283735Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 12620356791158840077 SessionId# [1:130:1] Binding# {2.0/12620356791158840077@[1:130:1]} Record# {RootNodeId: 2 } 2025-11-03T14:01:06.283802Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [3:249:20] SessionId# [2:108:2] Cookie# 14210779570963366175 2025-11-03T14:01:06.283815Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# 
{SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-11-03T14:01:06.283853Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 3 Cookie# 14210779570963366175 SessionId# [2:108:2] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.7" Port: 19001 NodeId: 7 } DeletedBoundNodeIds { Host: "127.0.0.9" Port: 19001 NodeId: 9 } DeletedBoundNodeIds { Host: "127.0.0.4" Port: 19001 NodeId: 4 } DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.5" Port: 19001 NodeId: 5 } } RootNodeId# 2 2025-11-03T14:01:06.283863Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [2:108:2] Inserted# false Subscription# {SessionId# [2:108:2] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-11-03T14:01:06.283870Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.7:19001/7 2025-11-03T14:01:06.283876Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.9:19001/9 2025-11-03T14:01:06.283882Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.4:19001/4 2025-11-03T14:01:06.283887Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.1:19001/1 2025-11-03T14:01:06.283892Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 3 NodeId# 127.0.0.5:19001/5 2025-11-03T14:01:06.283971Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [7:146:1] Cookie# 12478074231622998491 2025-11-03T14:01:06.283981Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:06.283995Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 12478074231622998491 SessionId# [7:146:1] Binding# {1.1/12478074231622998491@[7:146:1]} Record# {RootNodeId: 2 } 2025-11-03T14:01:06.284012Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [4:137:1] Cookie# 7113503852082071001 2025-11-03T14:01:06.284020Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:06.284029Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 7113503852082071001 SessionId# [4:137:1] Binding# {1.1/7113503852082071001@[4:137:1]} Record# {RootNodeId: 2 } 2025-11-03T14:01:06.284045Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [5:140:1] Cookie# 16745789488680382382 2025-11-03T14:01:06.284052Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:06.284060Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 
16745789488680382382 SessionId# [5:140:1] Binding# {1.1/16745789488680382382@[5:140:1]} Record# {RootNodeId: 2 } 2025-11-03T14:01:06.284485Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:263:20] SessionId# [9:64:5] Cookie# 6004445114694099301 2025-11-03T14:01:06.284501Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [9:64:5] Inserted# false Subscription# {SessionId# [9:64:5] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:06.284517Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 6004445114694099301 SessionId# [9:64:5] Binding# {5.1/6004445114694099301@[9:64:5]} Record# {RootNodeId: 2 } 2025-11-03T14:01:06.285765Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285785Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285792Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285798Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285804Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285811Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285817Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285823Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285829Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.285835Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.286471Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639257 Sender# [1:349:57] SessionId# [0:0:0] Cookie# 0 2025-11-03T14:01:06.286569Z 1 00h00m00.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:06.286586Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:235:51] SessionId# [2:131:1] Cookie# 12620356791158840077 2025-11-03T14:01:06.286596Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-11-03T14:01:06.286615Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 1 Cookie# 12620356791158840077 SessionId# [2:131:1] Binding# Record# {CacheUpdate { } } RootNodeId# 2 2025-11-03T14:01:06.286665Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 8 2025-11-03T14:01:06.286696Z 7 00h00m00.002048s :BS_NODE DEBUG: 
{NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [7:146:1] Cookie# 12478074231622998491 2025-11-03T14:01:06.286703Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:06.286720Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 12478074231622998491 SessionId# [7:146:1] Binding# {1.2/12478074231622998491@[7:146:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:06.286736Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [4:137:1] Cookie# 7113503852082071001 2025-11-03T14:01:06.286743Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [4:137:1] Inserted# false Subscription# {SessionId# [4:137:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:06.286753Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 7113503852082071001 SessionId# [4:137:1] Binding# {1.2/7113503852082071001@[4:137:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:06.286766Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:235:51] SessionId# [5:140:1] Cookie# 16745789488680382382 2025-11-03T14:01:06.286773Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:06.286783Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 16745789488680382382 SessionId# [5:140:1] Binding# {1.2/16745789488680382382@[5:140:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:06.286798Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:06.286811Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:06.286839Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:06.286846Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:06.286852Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:06.286857Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:06.286863Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:06.286869Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:06.286874Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:06.286880Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
2025-11-03T14:01:06.286885Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:06.286891Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-0 ... 94037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:06.298649Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:06.298655Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:06.298661Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:06.298666Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:06.298671Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:06.298676Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:06.298681Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:06.298687Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:06.298693Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:06.298698Z 1 00h00m06.812761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:06.298705Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298712Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298717Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298723Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298728Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298738Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298744Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298749Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298757Z 1 00h00m06.812761s :STATESTORAGE DEBUG: 
ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298763Z 1 00h00m06.812761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:06.298777Z 1 00h00m06.812761s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:24343667:0] : 154}, {[1:4398070854771:0] : 158}, {[1:2199047599219:0] : 156}, {[1:3298559226995:0] : 157}, {[1:1099535971443:0] : 155}}}} 2025-11-03T14:01:06.298790Z 1 00h00m06.812761s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 5 Signature: {{[1:9895628993651:0] : 163}, {[1:7696605738099:0] : 161}, {[1:8796117365875:0] : 162}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-11-03T14:01:06.298802Z 1 00h00m06.812761s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-11-03T14:01:06.299810Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-11-03T14:01:06.299829Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-11-03T14:01:06.299841Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:1 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-11-03T14:01:06.299851Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:06.299859Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.299865Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:01:06.299871Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:01:06.299876Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:01:06.299882Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:06.299888Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:06.299893Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 
Cookie: 2} 2025-11-03T14:01:06.299898Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:01:06.299903Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:01:06.299915Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299924Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299931Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299940Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299947Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299957Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299964Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299971Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299978Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299985Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:1 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:06.299999Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:341:55] 
CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:24343667:0] : 154}, {[1:2199047599219:0] : 156}, {[1:1099535971443:0] : 155}}}} 2025-11-03T14:01:06.300012Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::HandleTEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 3 Signature: {{[1:7696605738099:0] : 161}, {[1:6597094110323:0] : 160}, {[1:5497582482547:0] : 159}}}} 2025-11-03T14:01:06.300025Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvInfo ev: {EvInfo Status: 0 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [1:341:55] CurrentLeaderTablet: [1:349:57] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 Signature: { Size: 8 Signature: {{[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} 2025-11-03T14:01:06.300053Z 1 00h00m10.002048s :STATESTORAGE DEBUG: RingGroupProxyRequest::Reply TEvUpdateSignature ev: {EvUpdateSignature TabletID: 72057594037932033 Signature: { Size: 10 Signature: {{[1:8796117365875:0] : 162}, {[1:5497582482547:0] : 159}, {[1:2199047599219:0] : 156}, {[1:9895628993651:0] : 163}, {[1:6597094110323:0] : 160}, {[1:3298559226995:0] : 157}, {[1:24343667:0] : 154}, {[1:7696605738099:0] : 161}, {[1:4398070854771:0] : 158}, {[1:1099535971443:0] : 155}}}} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Block42 [GOOD] Test command err: RandomSeed# 17807007815873312963 |55.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane >> BSCMovePDisk::PDiskMove_ErasureNone >> BsControllerTest::TestLocalBrokenRelocation >> BSCStopPDisk::PDiskStop |54.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/large/ydb-tests-olap-s3_import-large >> BsControllerTest::TestLocalSelfHeal >> VDiskBalancing::TestStopOneNode_Block42 >> BSCMovePDisk::PDiskMove_Mirror3dc >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] >> BSCStopPDisk::PDiskStop [GOOD] |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |54.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |54.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |54.5%| [AR] {RESULT} 
$(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] >> BsControllerTest::TestLocalSelfHeal [GOOD] >> BSCReadOnlyPDisk::ReadOnlyNotAllowed |54.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |54.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |54.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a >> Donor::ContinueWithFaultyDonor >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly |54.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/ydb-core-blobstorage-ut_blobstorage-ut_donor |54.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part1/ydb-library-yql-tests-sql-hybrid_file-part1 |54.3%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |54.3%| [LD] {RESULT} $(B)/ydb/core/ymq/ut/ydb-core-ymq-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/unittest >> BSCStopPDisk::PDiskStop [GOOD] Test command err: RandomSeed# 15398043956949152781 |54.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part18/ydb-library-yql-tests-sql-dq_file-part18 |54.3%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |54.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_comp_defrag/core-blobstorage-ut_blobstorage-ut_comp_defrag |54.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/test_connection/ut/ydb-core-fq-libs-test_connection-ut |54.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |54.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_storage/internal/ut/core-fq-libs-control_plane_storage-internal-ut |54.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part17/ydb-library-yql-tests-sql-dq_file-part17 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc3Nodes [GOOD] Test command err: RandomSeed# 18359079790093264529 |54.3%| [LD] {RESULT} $(B)/ydb/library/yql/dq/comp_nodes/ut/ydb-library-yql-dq-comp_nodes-ut >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] >> VDiskBalancing::TestRandom_Mirror3dc >> BsControllerTest::TestLocalBrokenRelocation [GOOD] >> Donor::ContinueWithFaultyDonor [GOOD] >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive |54.1%| [LD] {RESULT} $(B)/yql/tools/yqlrun/yqlrun ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_ErasureNone [GOOD] Test command err: RandomSeed# 6491335796493657073 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestStopOneNode_Block42 [GOOD] Test command err: RandomSeed# 16141598315809991755 SEND TEvPut with key [1:1:1:0:0:100:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:100:0] 2025-11-03T14:01:07.900029Z 3 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:304:64] 
TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-11-03T14:01:07.900091Z 8 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:229:17] ServerId# [1:309:69] TabletId# 72057594037932033 PipeClientId# [8:229:17] 2025-11-03T14:01:07.900124Z 6 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:307:67] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-11-03T14:01:07.900149Z 5 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:208:17] ServerId# [1:306:66] TabletId# 72057594037932033 PipeClientId# [5:208:17] 2025-11-03T14:01:07.900187Z 4 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:305:65] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-11-03T14:01:07.900220Z 2 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:303:63] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-11-03T14:01:07.900266Z 7 00h01m00.010512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:308:68] TabletId# 72057594037932033 PipeClientId# [7:222:17] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:100:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start compaction Finish compaction |54.2%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |54.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |54.0%| [LD] {RESULT} $(B)/ydb/core/ymq/base/ut/ydb-core-ymq-base-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetGoodDiskInBrokenGroupReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 3578412176508093207 2025-11-03T14:01:08.122528Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122554Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122562Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122569Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122577Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122584Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122591Z 7 
00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122853Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122872Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122884Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122896Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122907Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122919Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122930Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.122945Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122953Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122959Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122974Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122983Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122990Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.122996Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.123386Z 1 
00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.123397Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.123405Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.123416Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.123424Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.123432Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.123441Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 |54.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_testshard/ydb-core-blobstorage-ut_testshard |53.9%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |53.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot/ydb-core-blobstorage-ut_blobstorage-ut_blob_depot |53.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |53.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |53.8%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/read_metadata.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataOkErasureFiveParts [GOOD] Test command err: RandomSeed# 6268608145060071805 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { 
part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 0 ] part 2: ver0 disks [ 7 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 0 ]; part 2 disks [ 7 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK |53.7%| [LD] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/tests/sql/dq_file/part19/ydb-library-yql-tests-sql-dq_file-part19 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/unittest >> CheckIntegrityBlock42::DataErrorHeavySixPartsWithManyBroken [GOOD] Test command err: RandomSeed# 8129314687075620778 *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:4:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 7 ], ver2 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 
disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: ver0 disks [ 5 ] Erasure info: { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] 
FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 0 ] part 2: ver0 disks [ 1 ] part 3: ver0 disks [ 2 ] part 4: ver0 disks [ 3 ] part 5: ver0 disks [ 4 ] part 6: Erasure info: ERROR: There are erasure restore fails *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:5:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:6:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:7:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:0:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:1:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:2:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:1] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:2] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:3] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:4] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:5] TO [82000000:1:0:3:0] FINISHED WITH OK *** *** PUT BLOB [72075186270680851:57:3905:6:786432:1024:6] TO [82000000:1:0:3:0] FINISHED WITH OK *** Disks: 0: [82000000:1:0:5:0] 1: [82000000:1:0:6:0] 2: [82000000:1:0:7:0] 3: [82000000:1:0:0:0] 4: [82000000:1:0:1:0] 5: [82000000:1:0:2:0] 6: [82000000:1:0:3:0] 7: [82000000:1:0:4:0] Layout info: part 1: ver0 disks [ 6 ], ver1 disks [ 0 ] part 2: ver0 disks [ 6 ], ver1 disks [ 1 ] part 3: ver0 disks [ 6 ], ver1 disks [ 2 ] part 4: ver0 disks [ 3 ], ver1 disks [ 6 ] part 5: ver0 disks [ 4 ], ver1 disks [ 6 ] part 6: ver0 disks [ 5 ], ver1 disks [ 6 ] ERROR: There are unequal parts Erasure info: { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } 
CHECK part 4 disks [ 3 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 1 disks [ 6 ] -> OK { part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 1 disks [ 0 ] -> OK { part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 2 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 2 disks [ 1 ] -> OK { part 1 disks [ 0 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 5 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 5 disks [ 4 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 
disks [ 5 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 3 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 4 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 3 disks [ 2 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 4 disks [ 3 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 6 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 6 disks [ 5 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 4 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 5 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 4 disks [ 3 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 5 disks [ 4 ]; } CHECK part 6 disks [ 5 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 5 disks [ 6 ] -> OK { part 1 disks [ 6 ]; part 2 disks [ 6 ]; part 3 disks [ 6 ]; part 4 disks [ 6 ]; } CHECK part 6 disks [ 6 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 5 disks [ 4 ] -> OK { part 1 disks [ 0 ]; part 2 disks [ 1 ]; part 3 disks [ 2 ]; part 4 disks [ 3 ]; } CHECK part 6 disks [ 5 ] -> OK ERROR: There are erasure restore fails ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/unittest >> BSCMovePDisk::PDiskMove_Mirror3dc [GOOD] Test command err: RandomSeed# 5652117675203801433 >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestStateStorageUpdateSig [GOOD] Test command err: RandomSeed# 1552363607919634889 2025-11-03T14:01:03.487850Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 17287335616143847624 2025-11-03T14:01:03.487870Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:01:03.487875Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:711} TEvNodeConfigUnbind NodeId# 1 Cookie# 17287335616143847624 SessionId# [5:140:1] Binding# 2025-11-03T14:01:03.487881Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:720} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-11-03T14:01:03.487884Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-11-03T14:01:03.487889Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound 
RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-11-03T14:01:03.487892Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-11-03T14:01:03.487895Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-11-03T14:01:03.487898Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-11-03T14:01:03.487901Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-11-03T14:01:03.487904Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-11-03T14:01:03.487907Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-11-03T14:01:03.487910Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-11-03T14:01:03.487917Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} 2025-11-03T14:01:03.487924Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:01:03.487931Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 4 Binding# {4.0/5818848865611411609@[0:0:0]} SessionId# [0:0:0] 2025-11-03T14:01:03.487942Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 9673629300373724946 2025-11-03T14:01:03.487946Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:03.488811Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 9673629300373724946 SessionId# [2:131:1] Binding# {1.5/9673629300373724946@[2:131:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.488829Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 4975719363963513853 2025-11-03T14:01:03.488835Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:01:03.488845Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 4975719363963513853 SessionId# [3:134:1] Binding# {1.5/4975719363963513853@[3:134:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.488874Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [7:77:4] Cookie# 11417459709957075389 2025-11-03T14:01:03.488881Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [7:77:4] Inserted# false Subscription# {SessionId# [7:77:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:03.488893Z 7 
00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 11417459709957075389 SessionId# [7:77:4] Binding# {4.1/11417459709957075389@[7:77:4]} Record# {RootNodeId: 5 } 2025-11-03T14:01:03.488905Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [5:248:20] SessionId# [3:91:4] Cookie# 5818848865611411608 2025-11-03T14:01:03.488910Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 5 SessionId# [3:91:4] Inserted# false Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:01:03.488916Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:711} TEvNodeConfigUnbind NodeId# 5 Cookie# 5818848865611411608 SessionId# [3:91:4] Binding# {1.1/4975719363963513853@[3:134:1]} 2025-11-03T14:01:03.488922Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:720} UnbindNode NodeId# 5 Reason# explicit unbind request 2025-11-03T14:01:03.488927Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.6:19001/6 2025-11-03T14:01:03.488932Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.7:19001/7 2025-11-03T14:01:03.488936Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.9:19001/9 2025-11-03T14:01:03.488941Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.4:19001/4 2025-11-03T14:01:03.488945Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.1:19001/1 2025-11-03T14:01:03.488951Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.8:19001/8 2025-11-03T14:01:03.488956Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.2:19001/2 2025-11-03T14:01:03.488960Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.5:19001/5 2025-11-03T14:01:03.488966Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 5 NodeId# 127.0.0.3:19001/3 2025-11-03T14:01:03.488975Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 5 Subscription# {SessionId# [3:91:4] SubscriptionCookie# 0} 2025-11-03T14:01:03.488990Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [5:248:20] SessionId# [1:139:4] Cookie# 17287335616143847624 2025-11-03T14:01:03.489000Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 5 Cookie# 17287335616143847624 SessionId# [1:139:4] Binding# Record# {RootNodeId: 5 } 2025-11-03T14:01:03.489013Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [8:44:6] Cookie# 4294091783347956743 2025-11-03T14:01:03.489020Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [8:44:6] Inserted# false Subscription# {SessionId# [8:44:6] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:03.489029Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 6 Cookie# 
4294091783347956743 SessionId# [8:44:6] Binding# {6.1/4294091783347956743@[8:44:6]} Record# {RootNodeId: 5 } 2025-11-03T14:01:03.489040Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [9:47:6] Cookie# 9029801487543483925 2025-11-03T14:01:03.489047Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [9:47:6] Inserted# false Subscription# {SessionId# [9:47:6] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:03.489055Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 6 Cookie# 9029801487543483925 SessionId# [9:47:6] Binding# {6.1/9029801487543483925@[9:47:6]} Record# {RootNodeId: 5 } 2025-11-03T14:01:03.489112Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 131077 Sender# [5:71:4] SessionId# [0:0:0] Cookie# 6 2025-11-03T14:01:03.489121Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:215} TEvNodeConnected NodeId# 4 SessionId# [5:71:4] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-11-03T14:01:03.489128Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:251} Continuing bind Binding# {4.0/5818848865611411609@[0:0:0]} 2025-11-03T14:01:03.489166Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 10426503802938909883 2025-11-03T14:01:03.489173Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:03.489184Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 10426503802938909883 SessionId# [4:89:3] Binding# {3.5/10426503802938909883@[4:89:3]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.489198Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [5:92:3] Cookie# 5818848865611411608 2025-11-03T14:01:03.489207Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 5818848865611411608 SessionId# [5:92:3] Binding# {4.0/5818848865611411609@[5:71:4]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.489218Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [6:95:3] Cookie# 2286634777816737371 2025-11-03T14:01:03.489224Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [6:95:3] Inserted# false Subscription# {SessionId# [6:95:3] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-11-03T14:01:03.489233Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 2286634777816737371 SessionId# [6:95:3] Binding# {3.5/2286634777816737371@[6:95:3]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.489384Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [3:234:20] SessionId# [1:133:2] Cookie# 4975719363963513853 2025-11-03T14:01:03.489393Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [1:133:2] Inserted# false Subscription# {SessionId# [1:133:2] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:01:03.489419Z 1 
00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 3 Cookie# 4975719363963513853 SessionId# [1:133:2] Binding# Record# {DeletedBoundNodeIds { Host: "127.0.0.1" Port: 19001 NodeId: 1 } DeletedBoundNodeIds { Host: "127.0.0.2" Port: 19001 NodeId: 2 } DeletedBou ... 1 00h00m00.360646s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.495620Z 1 00h00m00.360646s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.495623Z 1 00h00m00.360646s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.495627Z 1 00h00m00.360646s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.495630Z 1 00h00m00.360646s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.495633Z 1 00h00m00.360646s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.495636Z 1 00h00m00.360646s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495640Z 1 00h00m00.360646s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495643Z 1 00h00m00.360646s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495646Z 1 00h00m00.360646s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495650Z 1 00h00m00.360646s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495672Z 1 00h00m00.795761s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.495677Z 1 00h00m00.795761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.495680Z 1 00h00m00.795761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.495683Z 1 00h00m00.795761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.495686Z 1 00h00m00.795761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.495689Z 1 00h00m00.795761s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.495693Z 1 00h00m00.795761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495697Z 1 00h00m00.795761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 
2025-11-03T14:01:03.495701Z 1 00h00m00.795761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495704Z 1 00h00m00.795761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495710Z 1 00h00m00.795761s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495952Z 1 00h00m01.665991s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.495962Z 1 00h00m01.665991s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.495965Z 1 00h00m01.665991s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.495969Z 1 00h00m01.665991s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.495972Z 1 00h00m01.665991s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.495975Z 1 00h00m01.665991s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.495980Z 1 00h00m01.665991s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495983Z 1 00h00m01.665991s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495986Z 1 00h00m01.665991s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495990Z 1 00h00m01.665991s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.495993Z 1 00h00m01.665991s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.496307Z 1 00h00m03.510878s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.496315Z 1 00h00m03.510878s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.496318Z 1 00h00m03.510878s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.496322Z 1 00h00m03.510878s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.496325Z 1 00h00m03.510878s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.496328Z 1 00h00m03.510878s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.496332Z 1 00h00m03.510878s :STATESTORAGE 
DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.496338Z 1 00h00m03.510878s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.496341Z 1 00h00m03.510878s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.496345Z 1 00h00m03.510878s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.496348Z 1 00h00m03.510878s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.497023Z 1 00h00m07.643424s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.497038Z 1 00h00m07.643424s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.497044Z 1 00h00m07.643424s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.497050Z 1 00h00m07.643424s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.497053Z 1 00h00m07.643424s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.497057Z 1 00h00m07.643424s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.497061Z 1 00h00m07.643424s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.497065Z 1 00h00m07.643424s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.497069Z 1 00h00m07.643424s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.497074Z 1 00h00m07.643424s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.497079Z 1 00h00m07.643424s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.497653Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-11-03T14:01:03.497663Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:03.497668Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:03.497671Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 
Cookie: 2} 2025-11-03T14:01:03.497674Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:01:03.497678Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:01:03.497685Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.497692Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.497698Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.497704Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig undelivered ringGroup:0 for: 3 2025-11-03T14:01:03.497709Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ContinueWithFaultyDonor [GOOD] Test command err: RandomSeed# 1497126414981668743 2025-11-03T14:01:08.781896Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:08.782135Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 1945801883243992855] 2025-11-03T14:01:08.782876Z 1 00h01m14.361024s :BS_SYNCER ERROR: PDiskId# 1001 VDISK[82000000:_:0:0:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 0 PartsResurrected# 0 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::SetBrokenDiskInBrokenGroupReadOnly [GOOD] Test command err: RandomSeed# 13916254824661724472 2025-11-03T14:01:08.667141Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667162Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667167Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667172Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from 
PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667177Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667182Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667187Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667192Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667362Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667377Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667387Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667397Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667406Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667414Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667422Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667430Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667441Z 1 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667446Z 6 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667452Z 7 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' 
StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667456Z 8 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667460Z 2 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667464Z 3 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667468Z 4 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667472Z 5 00h00m30.010512s :BS_VDISK_OTHER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) CheckPDiskResponse: Recoverable error from PDisk: Status# 'CORRUPTED' StatusFlags# 'None' ErrorReason# 'Some error reason' 2025-11-03T14:01:08.667753Z 1 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667759Z 6 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667764Z 7 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667768Z 8 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667773Z 2 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667777Z 3 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667781Z 4 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.667786Z 5 00h00m30.010512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) SkeletonFront: got TEvPDiskErrorStateChange;State# NoWrites, PDiskError# Some error reason Marker# BSVSF03 2025-11-03T14:01:08.685483Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:1:0]} 2025-11-03T14:01:08.685500Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# 
VDISK_ERROR_STATE VDiskId# [82000000:1:0:2:0]} 2025-11-03T14:01:08.685505Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:3:0]} 2025-11-03T14:01:08.685512Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:4:0]} 2025-11-03T14:01:08.685516Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:5:0]} 2025-11-03T14:01:08.685521Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:6:0]} 2025-11-03T14:01:08.685525Z 1 00h01m30.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidProxyBase::Handle(TEvVSyncGuidResult): NOT OK: msg# {TEvVSyncGuidResult: Status# VDISK_ERROR_STATE VDiskId# [82000000:1:0:7:0]} |53.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/compatibility/binaries/downloader/downloader |53.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part16/ydb-library-yql-tests-sql-dq_file-part16 |53.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |53.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::ReadOnlyNotAllowed [GOOD] Test command err: RandomSeed# 724234806376528427 |53.1%| [TS] {RESULT} ydb/tests/stress/ctas/tests/import_test >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part10/ydb-library-yql-tests-sql-dq_file-part10 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestBoardConfigMismatch [GOOD] Test command err: RandomSeed# 17703282798002926759 2025-11-03T14:01:06.371291Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639240 Sender# [1:220:36] SessionId# [5:140:1] Cookie# 1540772179720059965 2025-11-03T14:01:06.371317Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [5:140:1] Inserted# false Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:01:06.371326Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC16@distconf_binding.cpp:711} TEvNodeConfigUnbind NodeId# 1 Cookie# 1540772179720059965 SessionId# [5:140:1] Binding# {4.1/12731101861817089215@[5:71:4]} 2025-11-03T14:01:06.371334Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:720} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-11-03T14:01:06.371341Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-11-03T14:01:06.371349Z 5 00h00m00.000000s 
:BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-11-03T14:01:06.371354Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-11-03T14:01:06.371360Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-11-03T14:01:06.371371Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-11-03T14:01:06.371377Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-11-03T14:01:06.371383Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-11-03T14:01:06.371394Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [5:140:1] SubscriptionCookie# 0} 2025-11-03T14:01:06.371418Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [8:149:1] Cookie# 7195306111523451008 2025-11-03T14:01:06.371425Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [8:149:1] Inserted# false Subscription# {SessionId# [8:149:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:06.372993Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 7195306111523451008 SessionId# [8:149:1] Binding# {1.5/7195306111523451008@[8:149:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:06.373041Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 11507966948981427824 2025-11-03T14:01:06.373054Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:06.373072Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 11507966948981427824 SessionId# [2:131:1] Binding# {1.5/11507966948981427824@[2:131:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:06.373091Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [3:134:1] Cookie# 12635851880243090171 2025-11-03T14:01:06.373098Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [3:134:1] Inserted# false Subscription# {SessionId# [3:134:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:06.373110Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 12635851880243090171 SessionId# [3:134:1] Binding# {1.5/12635851880243090171@[3:134:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:06.373127Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 15459025474711711447 2025-11-03T14:01:06.373134Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 
2025-11-03T14:01:06.373142Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 15459025474711711447 SessionId# [6:143:1] Binding# {1.5/15459025474711711447@[6:143:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:06.373188Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [9:83:4] Cookie# 16393753774220530673 2025-11-03T14:01:06.373196Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [9:83:4] Inserted# false Subscription# {SessionId# [9:83:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:06.373206Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 16393753774220530673 SessionId# [9:83:4] Binding# {4.1/16393753774220530673@[9:83:4]} Record# {RootNodeId: 5 } 2025-11-03T14:01:06.373220Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [5:71:4] Cookie# 12731101861817089215 2025-11-03T14:01:06.373226Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [5:71:4] Inserted# false Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:01:06.373235Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 12731101861817089215 SessionId# [5:71:4] Binding# {4.1/12731101861817089215@[5:71:4]} Record# {RootNodeId: 5 } 2025-11-03T14:01:06.373244Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:366} AbortBinding Binding# {4.1/12731101861817089215@[5:71:4]} Reason# binding cycle 2025-11-03T14:01:06.373251Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:87} AbortAllScatterTasks Binding# {4.1/12731101861817089215@[5:71:4]} 2025-11-03T14:01:06.373261Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 4 Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} 2025-11-03T14:01:06.373268Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:01:06.373276Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 2 Binding# {2.0/12731101861817089216@[0:0:0]} SessionId# [0:0:0] 2025-11-03T14:01:06.373340Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 1540772179720059966 2025-11-03T14:01:06.373428Z 7 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 1 Cookie# 1540772179720059966 SessionId# [7:146:1] Binding# {5.1/6207126951679810470@[7:58:5]} Record# {Initial: true BoundNodes { NodeId { Host: "127.0.0.6" Port: 19001 NodeId: 6 } Meta { Fingerprint: "\3403\207\365\032>> TStateStorageRingGroupState::TestStateStorageDoubleReply [GOOD] Test command err: RandomSeed# 15614079706898124858 2025-11-03T14:01:03.163608Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 12107155749032567837 2025-11-03T14:01:03.163627Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] 
Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:03.164516Z 3 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 12107155749032567837 SessionId# [3:109:2] Binding# {2.1/12107155749032567837@[3:109:2]} Record# {RootNodeId: 6 } 2025-11-03T14:01:03.164559Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [4:112:2] Cookie# 7905059572080463478 2025-11-03T14:01:03.164567Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [4:112:2] Inserted# false Subscription# {SessionId# [4:112:2] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:03.164579Z 4 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 7905059572080463478 SessionId# [4:112:2] Binding# {2.1/7905059572080463478@[4:112:2]} Record# {RootNodeId: 6 } 2025-11-03T14:01:03.164595Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [5:115:2] Cookie# 4604765801628977684 2025-11-03T14:01:03.164601Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [5:115:2] Inserted# false Subscription# {SessionId# [5:115:2] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:03.164609Z 5 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 4604765801628977684 SessionId# [5:115:2] Binding# {2.1/4604765801628977684@[5:115:2]} Record# {RootNodeId: 6 } 2025-11-03T14:01:03.164655Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [1:142:5] Cookie# 1694639031392820982 2025-11-03T14:01:03.164664Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [1:142:5] Inserted# false Subscription# {SessionId# [1:142:5] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:01:03.164673Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 6 Cookie# 1694639031392820982 SessionId# [1:142:5] Binding# {6.6/1694639031392820982@[1:142:5]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.164681Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC03@distconf_binding.cpp:366} AbortBinding Binding# {6.6/1694639031392820982@[1:142:5]} Reason# binding cycle 2025-11-03T14:01:03.164688Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC24@distconf_scatter_gather.cpp:87} AbortAllScatterTasks Binding# {6.6/1694639031392820982@[1:142:5]} 2025-11-03T14:01:03.164696Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 6 Subscription# {SessionId# [1:142:5] SubscriptionCookie# 0} 2025-11-03T14:01:03.164713Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [6:255:20] SessionId# [9:47:6] Cookie# 14595989724248427733 2025-11-03T14:01:03.164720Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [9:47:6] Inserted# false Subscription# {SessionId# [9:47:6] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:01:03.164730Z 9 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 6 Cookie# 14595989724248427733 SessionId# [9:47:6] Binding# 
{6.6/14595989724248427733@[9:47:6]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.164743Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [2:227:20] SessionId# [1:130:1] Cookie# 17142586422143351118 2025-11-03T14:01:03.164752Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 6 2025-11-03T14:01:03.164777Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 2 Cookie# 17142586422143351118 SessionId# [1:130:1] Binding# Record# {BoundNodes { NodeId { Host: "127.0.0.1" Port: 19001 NodeId: 1 } Meta { Fingerprint: "\3403\207\365\032> 2025-11-03T14:01:03.165052Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC06@distconf_binding.cpp:720} UnbindNode NodeId# 1 Reason# explicit unbind request 2025-11-03T14:01:03.165055Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.6:19001/6 2025-11-03T14:01:03.165058Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.7:19001/7 2025-11-03T14:01:03.165062Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.9:19001/9 2025-11-03T14:01:03.165065Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.4:19001/4 2025-11-03T14:01:03.165068Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.1:19001/1 2025-11-03T14:01:03.165072Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.8:19001/8 2025-11-03T14:01:03.165075Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.2:19001/2 2025-11-03T14:01:03.165078Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.5:19001/5 2025-11-03T14:01:03.165081Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC34@distconf_binding.cpp:513} DeleteBound RefererNodeId# 1 NodeId# 127.0.0.3:19001/3 2025-11-03T14:01:03.165086Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC55@distconf_binding.cpp:314} UnsubscribeInterconnect NodeId# 1 Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} 2025-11-03T14:01:03.165092Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [0:0:0] Inserted# true Subscription# {SessionId# [0:0:0] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:01:03.165097Z 6 00h00m00.000000s :BS_NODE DEBUG: {NWDC29@distconf_binding.cpp:180} Initiated bind NodeId# 2 Binding# {2.0/18042321814194367784@[0:0:0]} SessionId# [0:0:0] 2025-11-03T14:01:03.165106Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 17142586422143351118 2025-11-03T14:01:03.165109Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 5 2025-11-03T14:01:03.165115Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 17142586422143351118 SessionId# [2:131:1] Binding# 
{1.6/17142586422143351118@[2:131:1]} Record# {RootNodeId: 1 } 2025-11-03T14:01:03.165287Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [7:262:20] SessionId# [8:29:7] Cookie# 17639783762032940218 2025-11-03T14:01:03.165295Z 8 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} Subsc ... ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171634Z 1 00h00m00.888923s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171637Z 1 00h00m00.888923s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171640Z 1 00h00m00.888923s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171643Z 1 00h00m00.888923s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171865Z 1 00h00m01.862872s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.171872Z 1 00h00m01.862872s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.171876Z 1 00h00m01.862872s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.171880Z 1 00h00m01.862872s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.171885Z 1 00h00m01.862872s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.171890Z 1 00h00m01.862872s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.171896Z 1 00h00m01.862872s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171901Z 1 00h00m01.862872s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171908Z 1 00h00m01.862872s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171913Z 1 00h00m01.862872s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.171918Z 1 00h00m01.862872s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.172363Z 1 00h00m03.947122s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.172373Z 1 00h00m03.947122s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72057594037936131 Cookie: 0} 2025-11-03T14:01:03.172378Z 1 00h00m03.947122s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.172381Z 1 00h00m03.947122s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.172385Z 1 00h00m03.947122s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.172388Z 1 00h00m03.947122s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.172392Z 1 00h00m03.947122s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.172396Z 1 00h00m03.947122s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.172400Z 1 00h00m03.947122s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.172403Z 1 00h00m03.947122s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.172406Z 1 00h00m03.947122s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.173155Z 1 00h00m08.240677s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:03.173166Z 1 00h00m08.240677s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:03.173169Z 1 00h00m08.240677s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:03.173173Z 1 00h00m08.240677s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:03.173176Z 1 00h00m08.240677s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:03.173180Z 1 00h00m08.240677s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:03.173184Z 1 00h00m08.240677s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.173188Z 1 00h00m08.240677s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.173194Z 1 00h00m08.240677s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.173198Z 1 00h00m08.240677s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.173201Z 1 00h00m08.240677s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 
72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:03.173605Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigAsync} 2025-11-03T14:01:03.173613Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:03.173618Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:03.173621Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:01:03.173625Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:01:03.173628Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:01:03.173639Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173644Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173650Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::MergeReply duplicated TEvReplicaInfo cookie:0 replica:[1:24343667:0] signature:154 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173654Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173658Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173662Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::MergeReply duplicated TEvReplicaInfo cookie:1 replica:[1:1099535971443:0] signature:155 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173666Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173671Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 
0} 2025-11-03T14:01:03.173677Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::HandleUpdateSig duplicated TEvReplicaInfo cookie:2 replica:[1:2199047599219:0] signature:156 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173681Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173685Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173689Z 1 00h00m10.002048s :STATESTORAGE ERROR: TStateStorageProxyRequest::HandleUpdateSig duplicated TEvReplicaInfo cookie:3 replica:[1:3298559226995:0] signature:157 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:03.173692Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleUpdateSig ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} |53.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part1/ydb-library-yql-tests-sql-dq_file-part1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/unittest >> BSCReadOnlyPDisk::RestartAndReadOnlyConsecutive [GOOD] Test command err: RandomSeed# 3829211826114832033 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorage2RingGroups::TestStateStorageReplyOnce [GOOD] Test command err: RandomSeed# 1797181160164582257 2025-11-03T14:01:06.786070Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 131077 Sender# [2:131:1] SessionId# [0:0:0] Cookie# 6 2025-11-03T14:01:06.786096Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC14@distconf_binding.cpp:215} TEvNodeConnected NodeId# 1 SessionId# [2:131:1] Cookie# 6 CookieInFlight# true SubscriptionExists# true 2025-11-03T14:01:06.786106Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC09@distconf_binding.cpp:251} Continuing bind Binding# {1.0/1281288696482720116@[0:0:0]} 2025-11-03T14:01:06.786136Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [6:270:20] SessionId# [1:142:5] Cookie# 15629638698396327117 2025-11-03T14:01:06.786147Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [1:142:5] Inserted# false Subscription# {SessionId# [1:142:5] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:01:06.787679Z 1 00h00m00.000000s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 6 Cookie# 15629638698396327117 SessionId# [1:142:5] Binding# {6.0/15629638698396327117@[1:142:5]} Record# {RootNodeId: 2 } 2025-11-03T14:01:06.787732Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} 
StateFunc Type# 268639238 Sender# [6:270:20] SessionId# [2:117:5] Cookie# 2626357891313733636 2025-11-03T14:01:06.787745Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 6 SessionId# [2:117:5] Inserted# false Subscription# {SessionId# [2:117:5] SubscriptionCookie# 0} NextSubscribeCookie# 7 2025-11-03T14:01:06.787807Z 2 00h00m00.000000s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 6 Cookie# 2626357891313733636 SessionId# [2:117:5] Binding# {1.0/1281288696482720116@[2:131:1]} Record# {BoundNodes { NodeId { Host: "127.0.0.7" Port: 19001 NodeId: 7 } Meta { Fingerprint: "\3403\207\365\032>> BsControllerTest::SelfHealMirror3dc [GOOD] Test command err: 2025-11-03T14:00:57.961636Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-11-03T14:00:57.961659Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-11-03T14:00:57.961675Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-11-03T14:00:57.961679Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-11-03T14:00:57.961685Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-11-03T14:00:57.961688Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-11-03T14:00:57.961694Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-11-03T14:00:57.961698Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-11-03T14:00:57.961708Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-11-03T14:00:57.961712Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-11-03T14:00:57.961718Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-11-03T14:00:57.961722Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-11-03T14:00:57.961728Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-11-03T14:00:57.961732Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-11-03T14:00:57.961737Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-11-03T14:00:57.961741Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-11-03T14:00:57.961746Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-11-03T14:00:57.961750Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-11-03T14:00:57.961756Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-11-03T14:00:57.961760Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-11-03T14:00:57.961766Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-11-03T14:00:57.961769Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-11-03T14:00:57.961775Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-11-03T14:00:57.961779Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-11-03T14:00:57.961784Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-11-03T14:00:57.961788Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-11-03T14:00:57.961793Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-11-03T14:00:57.961797Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-11-03T14:00:57.961803Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-11-03T14:00:57.961807Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-11-03T14:00:57.961813Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-11-03T14:00:57.961816Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-11-03T14:00:57.961822Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-11-03T14:00:57.961825Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-11-03T14:00:57.961831Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-11-03T14:00:57.961835Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-11-03T14:00:57.961841Z 19 00h00m00.000000s 
:BS_NODE DEBUG: [19] Bootstrap 2025-11-03T14:00:57.961844Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-11-03T14:00:57.961850Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-11-03T14:00:57.961854Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-11-03T14:00:57.961874Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-11-03T14:00:57.961880Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-11-03T14:00:57.961886Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-11-03T14:00:57.961890Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-11-03T14:00:57.961896Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-11-03T14:00:57.961899Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-11-03T14:00:57.961905Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-11-03T14:00:57.961909Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-11-03T14:00:57.961914Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-11-03T14:00:57.961918Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-11-03T14:00:57.961924Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-11-03T14:00:57.961928Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-11-03T14:00:57.961934Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-11-03T14:00:57.961940Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-11-03T14:00:57.961947Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-11-03T14:00:57.961951Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-11-03T14:00:57.961956Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-11-03T14:00:57.961960Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-11-03T14:00:57.961971Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-11-03T14:00:57.961975Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-11-03T14:00:57.961980Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-11-03T14:00:57.961984Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-11-03T14:00:57.961990Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-11-03T14:00:57.961994Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-11-03T14:00:57.961999Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-11-03T14:00:57.962003Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-11-03T14:00:57.962009Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-11-03T14:00:57.962012Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-11-03T14:00:57.962018Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-11-03T14:00:57.962022Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-11-03T14:00:57.962027Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-11-03T14:00:57.962031Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-11-03T14:00:57.966126Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-11-03T14:00:57.966411Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-11-03T14:00:57.966421Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-11-03T14:00:57.966427Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-11-03T14:00:57.966434Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] 
ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-11-03T14:00:57.966440Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-11-03T14:00:57.966446Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-11-03T14:00:57.966461Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-11-03T14:00:57.966467Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-11-03T14:00:57.966474Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-11-03T14:00:57.966481Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-11-03T14:00:57.966487Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-11-03T14:00:57.966494Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-11-03T14:00:57.966501Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-11-03T14:00:57.966508Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-11-03T14:00:57.966515Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-11-03T14:00:57.966521Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-11-03T14:00:57.966527Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-11-03T14:00:57.966534Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-11-03T14:00:57.966561Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-11-03T14:00:57.966567Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-11-03T14:00:57.966574Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-11-03T14:00:57.966581Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-11-03T14:00:57.966588Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] 
Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-11-03T14:00:57.966594Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-11-03T14:00:57.966601Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-11-03T14:00:57.966607Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-11-03T14:00:57.966613Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-11-03T14:00:57.966621Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-11-03T14:00:57.966652Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-11-03T14:00:57.966658Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-11-03T14:00:57.966665Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-11-03T14:00:57.966671Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-11-03T14:00:57.966678Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-11-03T14:00:57.966685Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
:1:0] DiskIsOk# true 2025-11-03T14:01:01.130154Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483739 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.130162Z 1 05h45m00.122016s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483739 VDiskId# [8000005b:2:2:2:0] DiskIsOk# true 2025-11-03T14:01:01.134789Z 1 05h45m00.122528s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483739 Items# [8000005b:2:1:0:0]: 18:1000:1005 -> 16:1001:1008 ConfigTxSeqNo# 546 2025-11-03T14:01:01.134809Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483739 Success# true 2025-11-03T14:01:01.134845Z 18 05h45m00.122528s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.134873Z 36 05h45m00.122528s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-11-03T14:01:01.134886Z 36 05h45m00.122528s :BS_NODE DEBUG: [36] VDiskId# [8000005b:2:2:2:0] -> [8000005b:3:2:2:0] 2025-11-03T14:01:01.134904Z 3 05h45m00.122528s :BS_NODE DEBUG: [3] NodeServiceSetUpdate 2025-11-03T14:01:01.134914Z 3 05h45m00.122528s :BS_NODE DEBUG: [3] VDiskId# [8000005b:2:0:2:0] -> [8000005b:3:0:2:0] 2025-11-03T14:01:01.134932Z 21 05h45m00.122528s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:01:01.134941Z 21 05h45m00.122528s :BS_NODE DEBUG: [21] VDiskId# [8000005b:2:1:1:0] -> [8000005b:3:1:1:0] 2025-11-03T14:01:01.134957Z 6 05h45m00.122528s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-11-03T14:01:01.134967Z 6 05h45m00.122528s :BS_NODE DEBUG: [6] VDiskId# [8000005b:2:0:0:0] -> [8000005b:3:0:0:0] 2025-11-03T14:01:01.134986Z 24 05h45m00.122528s :BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-11-03T14:01:01.134995Z 24 05h45m00.122528s :BS_NODE DEBUG: [24] VDiskId# [8000005b:2:1:2:0] -> [8000005b:3:1:2:0] 2025-11-03T14:01:01.135012Z 9 05h45m00.122528s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-11-03T14:01:01.135021Z 9 05h45m00.122528s :BS_NODE DEBUG: [9] VDiskId# [8000005b:2:0:1:0] -> [8000005b:3:0:1:0] 2025-11-03T14:01:01.135040Z 30 05h45m00.122528s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-11-03T14:01:01.135049Z 30 05h45m00.122528s :BS_NODE DEBUG: [30] VDiskId# [8000005b:2:2:0:0] -> [8000005b:3:2:0:0] 2025-11-03T14:01:01.135068Z 33 05h45m00.122528s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-11-03T14:01:01.135076Z 33 05h45m00.122528s :BS_NODE DEBUG: [33] VDiskId# [8000005b:2:2:1:0] -> [8000005b:3:2:1:0] 2025-11-03T14:01:01.135095Z 16 05h45m00.122528s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-11-03T14:01:01.135104Z 16 05h45m00.122528s :BS_NODE DEBUG: [16] VDiskId# [8000005b:3:1:0:0] PDiskId# 1001 VSlotId# 1008 created 2025-11-03T14:01:01.135140Z 16 05h45m00.122528s :BS_NODE DEBUG: [16] VDiskId# [8000005b:3:1:0:0] status changed to INIT_PENDING 2025-11-03T14:01:01.135232Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483723 2025-11-03T14:01:01.135496Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135507Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:0:0:0] DiskIsOk# true 2025-11-03T14:01:01.135515Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 
2025-11-03T14:01:01.135521Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:0:1:0] DiskIsOk# true 2025-11-03T14:01:01.135528Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135533Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:0:2:0] DiskIsOk# true 2025-11-03T14:01:01.135539Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135547Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:1:1:0] DiskIsOk# true 2025-11-03T14:01:01.135553Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135558Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:1:2:0] DiskIsOk# true 2025-11-03T14:01:01.135564Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135569Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:2:0:0] DiskIsOk# true 2025-11-03T14:01:01.135575Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135580Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:2:1:0] DiskIsOk# true 2025-11-03T14:01:01.135586Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483723 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:01.135591Z 1 05h45m00.122528s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483723 VDiskId# [8000004b:2:2:2:0] DiskIsOk# true 2025-11-03T14:01:01.140375Z 1 05h45m00.123040s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483723 Items# [8000004b:2:1:0:0]: 18:1000:1004 -> 14:1001:1008 ConfigTxSeqNo# 547 2025-11-03T14:01:01.140398Z 1 05h45m00.123040s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483723 Success# true 2025-11-03T14:01:01.140437Z 18 05h45m00.123040s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.140466Z 36 05h45m00.123040s :BS_NODE DEBUG: [36] NodeServiceSetUpdate 2025-11-03T14:01:01.140478Z 36 05h45m00.123040s :BS_NODE DEBUG: [36] VDiskId# [8000004b:2:2:2:0] -> [8000004b:3:2:2:0] 2025-11-03T14:01:01.140498Z 21 05h45m00.123040s :BS_NODE DEBUG: [21] NodeServiceSetUpdate 2025-11-03T14:01:01.140508Z 21 05h45m00.123040s :BS_NODE DEBUG: [21] VDiskId# [8000004b:2:1:1:0] -> [8000004b:3:1:1:0] 2025-11-03T14:01:01.140526Z 6 05h45m00.123040s :BS_NODE DEBUG: [6] NodeServiceSetUpdate 2025-11-03T14:01:01.140536Z 6 05h45m00.123040s :BS_NODE DEBUG: [6] VDiskId# [8000004b:2:0:0:0] -> [8000004b:3:0:0:0] 2025-11-03T14:01:01.140556Z 24 05h45m00.123040s 
:BS_NODE DEBUG: [24] NodeServiceSetUpdate 2025-11-03T14:01:01.140564Z 24 05h45m00.123040s :BS_NODE DEBUG: [24] VDiskId# [8000004b:2:1:2:0] -> [8000004b:3:1:2:0] 2025-11-03T14:01:01.140599Z 9 05h45m00.123040s :BS_NODE DEBUG: [9] NodeServiceSetUpdate 2025-11-03T14:01:01.140608Z 9 05h45m00.123040s :BS_NODE DEBUG: [9] VDiskId# [8000004b:2:0:1:0] -> [8000004b:3:0:1:0] 2025-11-03T14:01:01.140626Z 10 05h45m00.123040s :BS_NODE DEBUG: [10] NodeServiceSetUpdate 2025-11-03T14:01:01.140636Z 10 05h45m00.123040s :BS_NODE DEBUG: [10] VDiskId# [8000004b:2:0:2:0] -> [8000004b:3:0:2:0] 2025-11-03T14:01:01.140654Z 30 05h45m00.123040s :BS_NODE DEBUG: [30] NodeServiceSetUpdate 2025-11-03T14:01:01.140663Z 30 05h45m00.123040s :BS_NODE DEBUG: [30] VDiskId# [8000004b:2:2:0:0] -> [8000004b:3:2:0:0] 2025-11-03T14:01:01.140682Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:01.140691Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] VDiskId# [8000004b:3:1:0:0] PDiskId# 1001 VSlotId# 1008 created 2025-11-03T14:01:01.140721Z 14 05h45m00.123040s :BS_NODE DEBUG: [14] VDiskId# [8000004b:3:1:0:0] status changed to INIT_PENDING 2025-11-03T14:01:01.140742Z 33 05h45m00.123040s :BS_NODE DEBUG: [33] NodeServiceSetUpdate 2025-11-03T14:01:01.140751Z 33 05h45m00.123040s :BS_NODE DEBUG: [33] VDiskId# [8000004b:2:2:1:0] -> [8000004b:3:2:1:0] 2025-11-03T14:01:01.141064Z 16 05h45m01.668480s :BS_NODE DEBUG: [16] VDiskId# [8000001b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.141175Z 14 05h45m01.691016s :BS_NODE DEBUG: [14] VDiskId# [8000006b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.141262Z 18 05h45m02.286504s :BS_NODE DEBUG: [18] VDiskId# [8000007b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.141378Z 17 05h45m02.607992s :BS_NODE DEBUG: [17] VDiskId# [8000000b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.141454Z 14 05h45m04.114040s :BS_NODE DEBUG: [14] VDiskId# [8000004b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.141541Z 16 05h45m04.330968s :BS_NODE DEBUG: [16] VDiskId# [8000002b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.141639Z 17 05h45m04.993456s :BS_NODE DEBUG: [17] VDiskId# [8000003b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.142563Z 16 05h45m05.852528s :BS_NODE DEBUG: [16] VDiskId# [8000005b:3:1:0:0] status changed to REPLICATING 2025-11-03T14:01:01.142761Z 18 05h45m09.298504s :BS_NODE DEBUG: [18] VDiskId# [8000007b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.145210Z 18 05h45m09.299016s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.145231Z 18 05h45m09.299016s :BS_NODE DEBUG: [18] VDiskId# [8000007b:2:1:0:0] destroyed 2025-11-03T14:01:01.145930Z 17 05h45m11.268992s :BS_NODE DEBUG: [17] VDiskId# [8000000b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.148385Z 18 05h45m11.269504s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.148405Z 18 05h45m11.269504s :BS_NODE DEBUG: [18] VDiskId# [8000000b:2:1:0:0] destroyed 2025-11-03T14:01:01.149216Z 16 05h45m15.040968s :BS_NODE DEBUG: [16] VDiskId# [8000002b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.151712Z 18 05h45m15.041480s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.151731Z 18 05h45m15.041480s :BS_NODE DEBUG: [18] VDiskId# [8000002b:2:1:0:0] destroyed 2025-11-03T14:01:01.152369Z 16 05h45m20.811528s :BS_NODE DEBUG: [16] VDiskId# [8000005b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.154821Z 18 05h45m20.812040s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.154840Z 
18 05h45m20.812040s :BS_NODE DEBUG: [18] VDiskId# [8000005b:2:1:0:0] destroyed 2025-11-03T14:01:01.155629Z 16 05h45m28.127480s :BS_NODE DEBUG: [16] VDiskId# [8000001b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.158099Z 18 05h45m28.127992s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.158118Z 18 05h45m28.127992s :BS_NODE DEBUG: [18] VDiskId# [8000001b:2:1:0:0] destroyed 2025-11-03T14:01:01.158977Z 14 05h45m31.163040s :BS_NODE DEBUG: [14] VDiskId# [8000004b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.161433Z 18 05h45m31.163552s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.161452Z 18 05h45m31.163552s :BS_NODE DEBUG: [18] VDiskId# [8000004b:2:1:0:0] destroyed 2025-11-03T14:01:01.161501Z 17 05h45m32.303456s :BS_NODE DEBUG: [17] VDiskId# [8000003b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.163871Z 18 05h45m32.303968s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.163891Z 18 05h45m32.303968s :BS_NODE DEBUG: [18] VDiskId# [8000003b:2:1:0:0] destroyed 2025-11-03T14:01:01.163938Z 14 05h45m33.147016s :BS_NODE DEBUG: [14] VDiskId# [8000006b:3:1:0:0] status changed to READY 2025-11-03T14:01:01.166168Z 18 05h45m33.147528s :BS_NODE DEBUG: [18] NodeServiceSetUpdate 2025-11-03T14:01:01.166183Z 18 05h45m33.147528s :BS_NODE DEBUG: [18] VDiskId# [8000006b:2:1:0:0] destroyed |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part8/ydb-library-yql-tests-sql-dq_file-part8 |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload_mixed |52.3%| [TS] {RESULT} ydb/tests/datashard/s3/flake8 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_statestorage/unittest >> TStateStorageRingGroupState::TestProxyConfigMismatch [GOOD] Test command err: RandomSeed# 7423700274439384707 2025-11-03T14:01:05.116170Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.116196Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.116203Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.116210Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.116216Z 1 00h00m00.000000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.116852Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639257 Sender# [1:310:42] SessionId# [0:0:0] Cookie# 0 2025-11-03T14:01:05.116962Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.116993Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639238 Sender# [1:220:36] SessionId# [2:131:1] Cookie# 10440559311458827647 2025-11-03T14:01:05.117007Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:05.118613Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC02@distconf_binding.cpp:551} TEvNodeConfigPush NodeId# 1 Cookie# 10440559311458827647 SessionId# [2:131:1] Binding# Record# {CacheUpdate { } } RootNodeId# 2 
2025-11-03T14:01:05.118661Z 2 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [2:131:1] Inserted# false Subscription# {SessionId# [2:131:1] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:05.118712Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 8246150703096089134 2025-11-03T14:01:05.118722Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:05.118745Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 8246150703096089134 SessionId# [7:146:1] Binding# {1.2/8246150703096089134@[7:146:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.118765Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 16163066369351204670 2025-11-03T14:01:05.118773Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:05.118784Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 16163066369351204670 SessionId# [6:143:1] Binding# {1.2/16163066369351204670@[6:143:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.118799Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:05.118809Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:05.118815Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:05.118821Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:05.118828Z 1 00h00m00.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:05.118858Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.118868Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.118874Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.118880Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.118886Z 1 00h00m00.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.118913Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] 
SessionId# [1:130:1] Cookie# 10440559311458827647 2025-11-03T14:01:05.118922Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [1:130:1] Inserted# false Subscription# {SessionId# [1:130:1] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-11-03T14:01:05.118935Z 1 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 10440559311458827647 SessionId# [1:130:1] Binding# {2.2/10440559311458827647@[1:130:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.118958Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [2:227:20] SessionId# [3:109:2] Cookie# 3311736749355051229 2025-11-03T14:01:05.118965Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 2 SessionId# [3:109:2] Inserted# false Subscription# {SessionId# [3:109:2] SubscriptionCookie# 0} NextSubscribeCookie# 4 2025-11-03T14:01:05.118975Z 3 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 2 Cookie# 3311736749355051229 SessionId# [3:109:2] Binding# {2.2/3311736749355051229@[3:109:2]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119036Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [7:146:1] Cookie# 8246150703096089134 2025-11-03T14:01:05.119045Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [7:146:1] Inserted# false Subscription# {SessionId# [7:146:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:05.119057Z 7 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 8246150703096089134 SessionId# [7:146:1] Binding# {1.2/8246150703096089134@[7:146:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119072Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [1:220:36] SessionId# [6:143:1] Cookie# 16163066369351204670 2025-11-03T14:01:05.119079Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 1 SessionId# [6:143:1] Inserted# false Subscription# {SessionId# [6:143:1] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:05.119089Z 6 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 1 Cookie# 16163066369351204670 SessionId# [6:143:1] Binding# {1.2/16163066369351204670@[6:143:1]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119101Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [8:101:3] Cookie# 6730346545219498653 2025-11-03T14:01:05.119110Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [8:101:3] Inserted# false Subscription# {SessionId# [8:101:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:05.119120Z 8 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 6730346545219498653 SessionId# [8:101:3] Binding# {3.2/6730346545219498653@[8:101:3]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119137Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [3:234:20] SessionId# [4:89:3] Cookie# 8108582779485206353 
2025-11-03T14:01:05.119144Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 3 SessionId# [4:89:3] Inserted# false Subscription# {SessionId# [4:89:3] SubscriptionCookie# 0} NextSubscribeCookie# 3 2025-11-03T14:01:05.119154Z 4 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 3 Cookie# 8108582779485206353 SessionId# [4:89:3] Binding# {3.2/8108582779485206353@[4:89:3]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119185Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [8:269:20] SessionId# [9:19:8] Cookie# 15184926012283946549 2025-11-03T14:01:05.119193Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 8 SessionId# [9:19:8] Inserted# false Subscription# {SessionId# [9:19:8] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:05.119203Z 9 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 8 Cookie# 15184926012283946549 SessionId# [9:19:8] Binding# {8.2/15184926012283946549@[9:19:8]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119217Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC15@distconf.cpp:401} StateFunc Type# 268639239 Sender# [4:241:20] SessionId# [5:71:4] Cookie# 16346293880813142372 2025-11-03T14:01:05.119224Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC54@distconf_binding.cpp:336} SubscribeToPeerNode NodeId# 4 SessionId# [5:71:4] Inserted# false Subscription# {SessionId# [5:71:4] SubscriptionCookie# 0} NextSubscribeCookie# 2 2025-11-03T14:01:05.119234Z 5 00h00m00.002048s :BS_NODE DEBUG: {NWDC17@distconf_binding.cpp:400} TEvNodeConfigReversePush NodeId# 4 Cookie# 16346293880813142372 SessionId# [5:71:4] Binding# {4.2/16346293880813142372@[5:71:4]} Record# {RootNodeId: 2 CacheUpdate { } } 2025-11-03T14:01:05.119261Z 1 00h00m00.003048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.119273Z 1 00h00m00.003048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:05.119279Z 1 00h00m00.003048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:05.119286Z 1 00h00m00.003048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:05.119292Z 1 00h00m00.003048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:05.119297Z 1 00h00m00.003048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:05.119305Z 1 00h00m00.003048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.119312Z 1 00h00m00.003048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057 ... 
leLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.127626Z 1 00h00m01.947612s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.127633Z 1 00h00m01.947612s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.127641Z 1 00h00m01.947612s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.127647Z 1 00h00m01.947612s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.128381Z 1 00h00m04.050437s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.128395Z 1 00h00m04.050437s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:05.128401Z 1 00h00m04.050437s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:05.128407Z 1 00h00m04.050437s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:05.128413Z 1 00h00m04.050437s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:05.128421Z 1 00h00m04.050437s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:05.128428Z 1 00h00m04.050437s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.128436Z 1 00h00m04.050437s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.128442Z 1 00h00m04.050437s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.128447Z 1 00h00m04.050437s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.128453Z 1 00h00m04.050437s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.129385Z 1 00h00m08.634595s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.129401Z 1 00h00m08.634595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:05.129407Z 1 00h00m08.634595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:05.129413Z 1 00h00m08.634595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 
2025-11-03T14:01:05.129418Z 1 00h00m08.634595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:05.129424Z 1 00h00m08.634595s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:05.129430Z 1 00h00m08.634595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.129437Z 1 00h00m08.634595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.129443Z 1 00h00m08.634595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.129449Z 1 00h00m08.634595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.129454Z 1 00h00m08.634595s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.130035Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.130048Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:05.130056Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.130062Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:01:05.130068Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:01:05.130073Z 1 00h00m10.002048s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:01:05.130088Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:05.130097Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:05.130104Z 1 00h00m10.002048s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:05.131901Z 1 00h00m18.353009s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.131919Z 1 00h00m18.353009s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-11-03T14:01:05.131926Z 1 00h00m18.353009s :STATESTORAGE DEBUG: 
Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-11-03T14:01:05.131932Z 1 00h00m18.353009s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-11-03T14:01:05.131938Z 1 00h00m18.353009s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 3} 2025-11-03T14:01:05.131944Z 1 00h00m18.353009s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 4} 2025-11-03T14:01:05.131951Z 1 00h00m18.353009s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2025-11-03T14:01:05.131958Z 1 00h00m18.353009s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 1 ClusterStateGuid: 2} 2025-11-03T14:01:05.131965Z 1 00h00m18.353009s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2025-11-03T14:01:05.131981Z 1 00h00m18.353009s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2025-11-03T14:01:05.132527Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-11-03T14:01:05.132555Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:05.132562Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-11-03T14:01:05.132568Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-11-03T14:01:05.132574Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 3} 2025-11-03T14:01:05.132580Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 4} 2025-11-03T14:01:05.132591Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:05.132599Z 1 00h00m20.100000s :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72057594037932033 ClusterStateGeneration: 1 ClusterStateGuid: 2 CurrentLeader: [1:306:40] CurrentLeaderTablet: [1:310:42] CurrentGeneration: 2 CurrentStep: 0} 2025-11-03T14:01:05.132607Z 1 00h00m20.100000s :STATESTORAGE DEBUG: StateStorageProxy TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=0 clusterStateGeneration=1 Info->ClusterStateGuid=0 clusterStateGuid=2 2025-11-03T14:01:05.132619Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 1 ClusterStateGuid: 2 2025-11-03T14:01:05.132631Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-11-03T14:01:05.132637Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 
msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2025-11-03T14:01:05.132645Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2025-11-03T14:01:05.132656Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaUpdate TabletID: 72057594037932033} 2025-11-03T14:01:05.132661Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2025-11-03T14:01:05.132669Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 2025-11-03T14:01:05.133303Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica::Handle ev: NKikimrStateStorage.TEvCleanup TabletID: 72057594037932033 ProposedLeader { RawX1: 0 RawX2: 0 } ClusterStateGeneration: 3 ClusterStateGuid: 4 2025-11-03T14:01:05.133313Z 1 00h00m20.100000s :STATESTORAGE DEBUG: Replica TEvNodeWardenNotifyConfigMismatch: Info->ClusterStateGeneration=1 msgGeneration=3 Info->ClusterStateGuid=2 msgGuid=4 2025-11-03T14:01:05.133322Z 1 00h00m20.100000s :BS_NODE INFO: {NW51@node_warden_resource.cpp:362} TEvNodeWardenNotifyConfigMismatch: NodeId: 1 ClusterStateGeneration: 3 ClusterStateGuid: 4 |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part6/ydb-library-yql-tests-sql-dq_file-part6 |52.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |52.3%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |52.3%| COMPACTING CACHE 49.4GiB ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalSelfHeal [GOOD] Test command err: 2025-11-03T14:01:07.654104Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-11-03T14:01:07.654122Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-11-03T14:01:07.654138Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-11-03T14:01:07.654143Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-11-03T14:01:07.654151Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-11-03T14:01:07.654156Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-11-03T14:01:07.654163Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-11-03T14:01:07.654168Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-11-03T14:01:07.654181Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-11-03T14:01:07.654186Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-11-03T14:01:07.654193Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-11-03T14:01:07.654198Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-11-03T14:01:07.654205Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-11-03T14:01:07.654209Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-11-03T14:01:07.654217Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-11-03T14:01:07.654222Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-11-03T14:01:07.654229Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-11-03T14:01:07.654234Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-11-03T14:01:07.654241Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-11-03T14:01:07.654247Z 10 00h00m00.000000s 
:BS_NODE DEBUG: [10] Connect 2025-11-03T14:01:07.654255Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-11-03T14:01:07.654260Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-11-03T14:01:07.654266Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-11-03T14:01:07.654271Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-11-03T14:01:07.654279Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 2025-11-03T14:01:07.654284Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-11-03T14:01:07.654290Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-11-03T14:01:07.654295Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-11-03T14:01:07.654302Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-11-03T14:01:07.654307Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-11-03T14:01:07.654314Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-11-03T14:01:07.654319Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-11-03T14:01:07.654325Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-11-03T14:01:07.654330Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-11-03T14:01:07.654336Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-11-03T14:01:07.654340Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-11-03T14:01:07.654347Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-11-03T14:01:07.654351Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-11-03T14:01:07.654357Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-11-03T14:01:07.654362Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-11-03T14:01:07.654377Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-11-03T14:01:07.654385Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-11-03T14:01:07.654392Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-11-03T14:01:07.654397Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-11-03T14:01:07.654403Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-11-03T14:01:07.654408Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-11-03T14:01:07.654415Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-11-03T14:01:07.654420Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-11-03T14:01:07.654427Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-11-03T14:01:07.654432Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-11-03T14:01:07.654439Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-11-03T14:01:07.654444Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-11-03T14:01:07.654451Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-11-03T14:01:07.654458Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-11-03T14:01:07.654465Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-11-03T14:01:07.654470Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-11-03T14:01:07.654478Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-11-03T14:01:07.654482Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-11-03T14:01:07.654496Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-11-03T14:01:07.654501Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-11-03T14:01:07.654508Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-11-03T14:01:07.654512Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-11-03T14:01:07.654519Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-11-03T14:01:07.654524Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-11-03T14:01:07.654531Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 
2025-11-03T14:01:07.654536Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-11-03T14:01:07.654543Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-11-03T14:01:07.654548Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-11-03T14:01:07.654555Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-11-03T14:01:07.654559Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-11-03T14:01:07.654566Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-11-03T14:01:07.654570Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-11-03T14:01:07.659488Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-11-03T14:01:07.659811Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-11-03T14:01:07.659827Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-11-03T14:01:07.659834Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-11-03T14:01:07.659842Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-11-03T14:01:07.659850Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-11-03T14:01:07.659858Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-11-03T14:01:07.659866Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-11-03T14:01:07.659874Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-11-03T14:01:07.659882Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-11-03T14:01:07.659891Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-11-03T14:01:07.659899Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-11-03T14:01:07.659907Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-11-03T14:01:07.659915Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-11-03T14:01:07.659923Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-11-03T14:01:07.659931Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-11-03T14:01:07.659939Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] 
ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-11-03T14:01:07.659947Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-11-03T14:01:07.659955Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-11-03T14:01:07.659963Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-11-03T14:01:07.659971Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-11-03T14:01:07.659979Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-11-03T14:01:07.659988Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-11-03T14:01:07.659999Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-11-03T14:01:07.660007Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-11-03T14:01:07.660015Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-11-03T14:01:07.660023Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-11-03T14:01:07.660032Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-11-03T14:01:07.660040Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-11-03T14:01:07.660049Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-11-03T14:01:07.660057Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-11-03T14:01:07.660065Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-11-03T14:01:07.660073Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-11-03T14:01:07.660082Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-11-03T14:01:07.660090Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
icated# true 2025-11-03T14:01:08.108676Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:1:2:1:0] DiskIsOk# true 2025-11-03T14:01:08.108682Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483705 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.108687Z 1 00h05m00.104608s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483705 VDiskId# [80000039:1:2:2:0] DiskIsOk# true 2025-11-03T14:01:08.112301Z 1 00h05m00.105120s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483705 Items# [80000039:1:1:1:0]: 14:1003:1003 -> 14:1000:1010 ConfigTxSeqNo# 48 2025-11-03T14:01:08.112318Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483705 Success# true 2025-11-03T14:01:08.112352Z 17 00h05m00.105120s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-11-03T14:01:08.112365Z 17 00h05m00.105120s :BS_NODE DEBUG: [17] VDiskId# [80000039:1:1:2:0] -> [80000039:2:1:2:0] 2025-11-03T14:01:08.112385Z 35 00h05m00.105120s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-11-03T14:01:08.112394Z 35 00h05m00.105120s :BS_NODE DEBUG: [35] VDiskId# [80000039:1:2:0:0] -> [80000039:2:2:0:0] 2025-11-03T14:01:08.112413Z 2 00h05m00.105120s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-11-03T14:01:08.112421Z 2 00h05m00.105120s :BS_NODE DEBUG: [2] VDiskId# [80000039:1:0:1:0] -> [80000039:2:0:1:0] 2025-11-03T14:01:08.112440Z 5 00h05m00.105120s :BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-11-03T14:01:08.112449Z 5 00h05m00.105120s :BS_NODE DEBUG: [5] VDiskId# [80000039:1:0:2:0] -> [80000039:2:0:2:0] 2025-11-03T14:01:08.112465Z 23 00h05m00.105120s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-11-03T14:01:08.112474Z 23 00h05m00.105120s :BS_NODE DEBUG: [23] VDiskId# [80000039:1:1:0:0] -> [80000039:2:1:0:0] 2025-11-03T14:01:08.112491Z 26 00h05m00.105120s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-11-03T14:01:08.112500Z 26 00h05m00.105120s :BS_NODE DEBUG: [26] VDiskId# [80000039:1:2:1:0] -> [80000039:2:2:1:0] 2025-11-03T14:01:08.112536Z 11 00h05m00.105120s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-11-03T14:01:08.112546Z 11 00h05m00.105120s :BS_NODE DEBUG: [11] VDiskId# [80000039:1:0:0:0] -> [80000039:2:0:0:0] 2025-11-03T14:01:08.112563Z 29 00h05m00.105120s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-11-03T14:01:08.112575Z 29 00h05m00.105120s :BS_NODE DEBUG: [29] VDiskId# [80000039:1:2:2:0] -> [80000039:2:2:2:0] 2025-11-03T14:01:08.112595Z 14 00h05m00.105120s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.112603Z 14 00h05m00.105120s :BS_NODE DEBUG: [14] VDiskId# [80000039:2:1:1:0] PDiskId# 1000 VSlotId# 1010 created 2025-11-03T14:01:08.112616Z 14 00h05m00.105120s :BS_NODE DEBUG: [14] VDiskId# [80000039:2:1:1:0] status changed to INIT_PENDING 2025-11-03T14:01:08.112690Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH01@self_heal.cpp:76} Reassigner starting GroupId# 2147483689 2025-11-03T14:01:08.112900Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112911Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:0:0:0] DiskIsOk# true 2025-11-03T14:01:08.112919Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} 
Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112925Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:0:1:0] DiskIsOk# true 2025-11-03T14:01:08.112932Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112939Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:0:2:0] DiskIsOk# true 2025-11-03T14:01:08.112946Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112952Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:1:0:0] DiskIsOk# true 2025-11-03T14:01:08.112959Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112966Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:1:2:0] DiskIsOk# true 2025-11-03T14:01:08.112973Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112978Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:2:0:0] DiskIsOk# true 2025-11-03T14:01:08.112985Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.112991Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:2:1:0] DiskIsOk# true 2025-11-03T14:01:08.112998Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH03@self_heal.cpp:116} Reassigner TEvVStatusResult GroupId# 2147483689 Status# OK JoinedGroup# true Replicated# true 2025-11-03T14:01:08.113003Z 1 00h05m00.105120s :BS_SELFHEAL DEBUG: {BSSH02@self_heal.cpp:101} Reassigner ProcessVDiskReply GroupId# 2147483689 VDiskId# [80000029:1:2:2:0] DiskIsOk# true 2025-11-03T14:01:08.116654Z 1 00h05m00.105632s :BS_SELFHEAL INFO: {BSSH09@self_heal.cpp:214} Reassigner succeeded GroupId# 2147483689 Items# [80000029:1:1:1:0]: 14:1003:1002 -> 14:1001:1010 ConfigTxSeqNo# 49 2025-11-03T14:01:08.116671Z 1 00h05m00.105632s :BS_SELFHEAL DEBUG: {BSSH08@self_heal.cpp:225} Reassigner finished GroupId# 2147483689 Success# true 2025-11-03T14:01:08.116708Z 17 00h05m00.105632s :BS_NODE DEBUG: [17] NodeServiceSetUpdate 2025-11-03T14:01:08.116725Z 17 00h05m00.105632s :BS_NODE DEBUG: [17] VDiskId# [80000029:1:1:2:0] -> [80000029:2:1:2:0] 2025-11-03T14:01:08.116745Z 35 00h05m00.105632s :BS_NODE DEBUG: [35] NodeServiceSetUpdate 2025-11-03T14:01:08.116754Z 35 00h05m00.105632s :BS_NODE DEBUG: [35] VDiskId# [80000029:1:2:0:0] -> [80000029:2:2:0:0] 2025-11-03T14:01:08.116771Z 2 00h05m00.105632s :BS_NODE DEBUG: [2] NodeServiceSetUpdate 2025-11-03T14:01:08.116779Z 2 00h05m00.105632s :BS_NODE DEBUG: [2] VDiskId# [80000029:1:0:1:0] -> [80000029:2:0:1:0] 2025-11-03T14:01:08.116796Z 5 00h05m00.105632s 
:BS_NODE DEBUG: [5] NodeServiceSetUpdate 2025-11-03T14:01:08.116805Z 5 00h05m00.105632s :BS_NODE DEBUG: [5] VDiskId# [80000029:1:0:2:0] -> [80000029:2:0:2:0] 2025-11-03T14:01:08.116823Z 23 00h05m00.105632s :BS_NODE DEBUG: [23] NodeServiceSetUpdate 2025-11-03T14:01:08.116832Z 23 00h05m00.105632s :BS_NODE DEBUG: [23] VDiskId# [80000029:1:1:0:0] -> [80000029:2:1:0:0] 2025-11-03T14:01:08.116849Z 26 00h05m00.105632s :BS_NODE DEBUG: [26] NodeServiceSetUpdate 2025-11-03T14:01:08.116858Z 26 00h05m00.105632s :BS_NODE DEBUG: [26] VDiskId# [80000029:1:2:1:0] -> [80000029:2:2:1:0] 2025-11-03T14:01:08.116875Z 11 00h05m00.105632s :BS_NODE DEBUG: [11] NodeServiceSetUpdate 2025-11-03T14:01:08.116884Z 11 00h05m00.105632s :BS_NODE DEBUG: [11] VDiskId# [80000029:1:0:0:0] -> [80000029:2:0:0:0] 2025-11-03T14:01:08.116901Z 29 00h05m00.105632s :BS_NODE DEBUG: [29] NodeServiceSetUpdate 2025-11-03T14:01:08.116910Z 29 00h05m00.105632s :BS_NODE DEBUG: [29] VDiskId# [80000029:1:2:2:0] -> [80000029:2:2:2:0] 2025-11-03T14:01:08.116929Z 14 00h05m00.105632s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.116938Z 14 00h05m00.105632s :BS_NODE DEBUG: [14] VDiskId# [80000029:2:1:1:0] PDiskId# 1001 VSlotId# 1010 created 2025-11-03T14:01:08.116952Z 14 00h05m00.105632s :BS_NODE DEBUG: [14] VDiskId# [80000029:2:1:1:0] status changed to INIT_PENDING 2025-11-03T14:01:08.117220Z 14 00h05m01.246608s :BS_NODE DEBUG: [14] VDiskId# [80000049:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.117348Z 14 00h05m02.118560s :BS_NODE DEBUG: [14] VDiskId# [80000009:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.117516Z 14 00h05m02.643048s :BS_NODE DEBUG: [14] VDiskId# [80000019:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.117685Z 14 00h05m03.406096s :BS_NODE DEBUG: [14] VDiskId# [80000059:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.117850Z 14 00h05m03.567584s :BS_NODE DEBUG: [14] VDiskId# [80000069:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.121042Z 14 00h05m04.584072s :BS_NODE DEBUG: [14] VDiskId# [80000079:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.121264Z 14 00h05m04.673632s :BS_NODE DEBUG: [14] VDiskId# [80000029:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.122137Z 14 00h05m05.834120s :BS_NODE DEBUG: [14] VDiskId# [80000039:2:1:1:0] status changed to REPLICATING 2025-11-03T14:01:08.122361Z 14 00h05m08.629048s :BS_NODE DEBUG: [14] VDiskId# [80000019:2:1:1:0] status changed to READY 2025-11-03T14:01:08.124439Z 14 00h05m08.629560s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.124457Z 14 00h05m08.629560s :BS_NODE DEBUG: [14] VDiskId# [80000019:1:1:1:0] destroyed 2025-11-03T14:01:08.124502Z 14 00h05m09.854072s :BS_NODE DEBUG: [14] VDiskId# [80000079:2:1:1:0] status changed to READY 2025-11-03T14:01:08.126479Z 14 00h05m09.854584s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.126497Z 14 00h05m09.854584s :BS_NODE DEBUG: [14] VDiskId# [80000079:1:1:1:0] destroyed 2025-11-03T14:01:08.127130Z 14 00h05m10.948632s :BS_NODE DEBUG: [14] VDiskId# [80000029:2:1:1:0] status changed to READY 2025-11-03T14:01:08.129164Z 14 00h05m10.949144s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.129182Z 14 00h05m10.949144s :BS_NODE DEBUG: [14] VDiskId# [80000029:1:1:1:0] destroyed 2025-11-03T14:01:08.129897Z 14 00h05m17.080608s :BS_NODE DEBUG: [14] VDiskId# [80000049:2:1:1:0] status changed to READY 2025-11-03T14:01:08.131896Z 14 00h05m17.081120s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.131914Z 
14 00h05m17.081120s :BS_NODE DEBUG: [14] VDiskId# [80000049:1:1:1:0] destroyed 2025-11-03T14:01:08.132533Z 14 00h05m20.487120s :BS_NODE DEBUG: [14] VDiskId# [80000039:2:1:1:0] status changed to READY 2025-11-03T14:01:08.134537Z 14 00h05m20.487632s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.134556Z 14 00h05m20.487632s :BS_NODE DEBUG: [14] VDiskId# [80000039:1:1:1:0] destroyed 2025-11-03T14:01:08.135278Z 14 00h05m28.211560s :BS_NODE DEBUG: [14] VDiskId# [80000009:2:1:1:0] status changed to READY 2025-11-03T14:01:08.137312Z 14 00h05m28.212072s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.137330Z 14 00h05m28.212072s :BS_NODE DEBUG: [14] VDiskId# [80000009:1:1:1:0] destroyed 2025-11-03T14:01:08.138451Z 14 00h05m35.722584s :BS_NODE DEBUG: [14] VDiskId# [80000069:2:1:1:0] status changed to READY 2025-11-03T14:01:08.140485Z 14 00h05m35.723096s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.140504Z 14 00h05m35.723096s :BS_NODE DEBUG: [14] VDiskId# [80000069:1:1:1:0] destroyed 2025-11-03T14:01:08.140568Z 14 00h05m38.378096s :BS_NODE DEBUG: [14] VDiskId# [80000059:2:1:1:0] status changed to READY 2025-11-03T14:01:08.142542Z 14 00h05m38.378608s :BS_NODE DEBUG: [14] NodeServiceSetUpdate 2025-11-03T14:01:08.142559Z 14 00h05m38.378608s :BS_NODE DEBUG: [14] VDiskId# [80000059:1:1:1:0] destroyed |52.3%| [TS] {RESULT} ydb/tests/functional/minidumps/flake8 >> downloader::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/bscontroller/ut_selfheal/unittest >> BsControllerTest::TestLocalBrokenRelocation [GOOD] Test command err: 2025-11-03T14:01:07.577794Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Bootstrap 2025-11-03T14:01:07.577812Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] Connect 2025-11-03T14:01:07.577829Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Bootstrap 2025-11-03T14:01:07.577834Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] Connect 2025-11-03T14:01:07.577841Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Bootstrap 2025-11-03T14:01:07.577846Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] Connect 2025-11-03T14:01:07.577854Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Bootstrap 2025-11-03T14:01:07.577858Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] Connect 2025-11-03T14:01:07.577871Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Bootstrap 2025-11-03T14:01:07.577876Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] Connect 2025-11-03T14:01:07.577883Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Bootstrap 2025-11-03T14:01:07.577888Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] Connect 2025-11-03T14:01:07.577895Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Bootstrap 2025-11-03T14:01:07.577900Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] Connect 2025-11-03T14:01:07.577907Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Bootstrap 2025-11-03T14:01:07.577912Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] Connect 2025-11-03T14:01:07.577919Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Bootstrap 2025-11-03T14:01:07.577923Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] Connect 2025-11-03T14:01:07.577930Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Bootstrap 2025-11-03T14:01:07.577936Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] Connect 2025-11-03T14:01:07.577943Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Bootstrap 2025-11-03T14:01:07.577947Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] Connect 2025-11-03T14:01:07.577954Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Bootstrap 2025-11-03T14:01:07.577959Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] Connect 2025-11-03T14:01:07.577966Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Bootstrap 
2025-11-03T14:01:07.577970Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] Connect 2025-11-03T14:01:07.577977Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Bootstrap 2025-11-03T14:01:07.577982Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] Connect 2025-11-03T14:01:07.577989Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Bootstrap 2025-11-03T14:01:07.577994Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] Connect 2025-11-03T14:01:07.578001Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Bootstrap 2025-11-03T14:01:07.578006Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] Connect 2025-11-03T14:01:07.578013Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Bootstrap 2025-11-03T14:01:07.578018Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] Connect 2025-11-03T14:01:07.578025Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Bootstrap 2025-11-03T14:01:07.578030Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] Connect 2025-11-03T14:01:07.578038Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Bootstrap 2025-11-03T14:01:07.578042Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] Connect 2025-11-03T14:01:07.578049Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Bootstrap 2025-11-03T14:01:07.578054Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] Connect 2025-11-03T14:01:07.578067Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Bootstrap 2025-11-03T14:01:07.578075Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] Connect 2025-11-03T14:01:07.578082Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Bootstrap 2025-11-03T14:01:07.578087Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] Connect 2025-11-03T14:01:07.578093Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Bootstrap 2025-11-03T14:01:07.578098Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] Connect 2025-11-03T14:01:07.578105Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Bootstrap 2025-11-03T14:01:07.578110Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] Connect 2025-11-03T14:01:07.578116Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Bootstrap 2025-11-03T14:01:07.578121Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] Connect 2025-11-03T14:01:07.578128Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Bootstrap 2025-11-03T14:01:07.578133Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] Connect 2025-11-03T14:01:07.578139Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Bootstrap 2025-11-03T14:01:07.578146Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] Connect 2025-11-03T14:01:07.578153Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Bootstrap 2025-11-03T14:01:07.578158Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] Connect 2025-11-03T14:01:07.578165Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Bootstrap 2025-11-03T14:01:07.578170Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] Connect 2025-11-03T14:01:07.578183Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Bootstrap 2025-11-03T14:01:07.578189Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] Connect 2025-11-03T14:01:07.578195Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Bootstrap 2025-11-03T14:01:07.578200Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] Connect 2025-11-03T14:01:07.578207Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Bootstrap 2025-11-03T14:01:07.578212Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] Connect 2025-11-03T14:01:07.578219Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Bootstrap 2025-11-03T14:01:07.578224Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] Connect 2025-11-03T14:01:07.578230Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Bootstrap 2025-11-03T14:01:07.578235Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] Connect 2025-11-03T14:01:07.578242Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Bootstrap 2025-11-03T14:01:07.578247Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] Connect 2025-11-03T14:01:07.578253Z 36 
00h00m00.000000s :BS_NODE DEBUG: [36] Bootstrap 2025-11-03T14:01:07.578258Z 36 00h00m00.000000s :BS_NODE DEBUG: [36] Connect 2025-11-03T14:01:07.583102Z 1 00h00m00.000000s :BS_NODE DEBUG: [1] ClientConnected Sender# [1:2719:59] Status# ERROR ClientId# [1:2719:59] ServerId# [0:0:0] PipeClient# [1:2719:59] 2025-11-03T14:01:07.583410Z 2 00h00m00.000000s :BS_NODE DEBUG: [2] ClientConnected Sender# [2:2720:41] Status# ERROR ClientId# [2:2720:41] ServerId# [0:0:0] PipeClient# [2:2720:41] 2025-11-03T14:01:07.583422Z 3 00h00m00.000000s :BS_NODE DEBUG: [3] ClientConnected Sender# [3:2721:41] Status# ERROR ClientId# [3:2721:41] ServerId# [0:0:0] PipeClient# [3:2721:41] 2025-11-03T14:01:07.583431Z 4 00h00m00.000000s :BS_NODE DEBUG: [4] ClientConnected Sender# [4:2722:41] Status# ERROR ClientId# [4:2722:41] ServerId# [0:0:0] PipeClient# [4:2722:41] 2025-11-03T14:01:07.583439Z 5 00h00m00.000000s :BS_NODE DEBUG: [5] ClientConnected Sender# [5:2723:41] Status# ERROR ClientId# [5:2723:41] ServerId# [0:0:0] PipeClient# [5:2723:41] 2025-11-03T14:01:07.583447Z 6 00h00m00.000000s :BS_NODE DEBUG: [6] ClientConnected Sender# [6:2724:41] Status# ERROR ClientId# [6:2724:41] ServerId# [0:0:0] PipeClient# [6:2724:41] 2025-11-03T14:01:07.583456Z 7 00h00m00.000000s :BS_NODE DEBUG: [7] ClientConnected Sender# [7:2725:41] Status# ERROR ClientId# [7:2725:41] ServerId# [0:0:0] PipeClient# [7:2725:41] 2025-11-03T14:01:07.583464Z 8 00h00m00.000000s :BS_NODE DEBUG: [8] ClientConnected Sender# [8:2726:41] Status# ERROR ClientId# [8:2726:41] ServerId# [0:0:0] PipeClient# [8:2726:41] 2025-11-03T14:01:07.583472Z 9 00h00m00.000000s :BS_NODE DEBUG: [9] ClientConnected Sender# [9:2727:41] Status# ERROR ClientId# [9:2727:41] ServerId# [0:0:0] PipeClient# [9:2727:41] 2025-11-03T14:01:07.583480Z 10 00h00m00.000000s :BS_NODE DEBUG: [10] ClientConnected Sender# [10:2728:41] Status# ERROR ClientId# [10:2728:41] ServerId# [0:0:0] PipeClient# [10:2728:41] 2025-11-03T14:01:07.583489Z 11 00h00m00.000000s :BS_NODE DEBUG: [11] ClientConnected Sender# [11:2729:41] Status# ERROR ClientId# [11:2729:41] ServerId# [0:0:0] PipeClient# [11:2729:41] 2025-11-03T14:01:07.583497Z 12 00h00m00.000000s :BS_NODE DEBUG: [12] ClientConnected Sender# [12:2730:41] Status# ERROR ClientId# [12:2730:41] ServerId# [0:0:0] PipeClient# [12:2730:41] 2025-11-03T14:01:07.583505Z 13 00h00m00.000000s :BS_NODE DEBUG: [13] ClientConnected Sender# [13:2731:41] Status# ERROR ClientId# [13:2731:41] ServerId# [0:0:0] PipeClient# [13:2731:41] 2025-11-03T14:01:07.583513Z 14 00h00m00.000000s :BS_NODE DEBUG: [14] ClientConnected Sender# [14:2732:41] Status# ERROR ClientId# [14:2732:41] ServerId# [0:0:0] PipeClient# [14:2732:41] 2025-11-03T14:01:07.583522Z 15 00h00m00.000000s :BS_NODE DEBUG: [15] ClientConnected Sender# [15:2733:41] Status# ERROR ClientId# [15:2733:41] ServerId# [0:0:0] PipeClient# [15:2733:41] 2025-11-03T14:01:07.583530Z 16 00h00m00.000000s :BS_NODE DEBUG: [16] ClientConnected Sender# [16:2734:41] Status# ERROR ClientId# [16:2734:41] ServerId# [0:0:0] PipeClient# [16:2734:41] 2025-11-03T14:01:07.583538Z 17 00h00m00.000000s :BS_NODE DEBUG: [17] ClientConnected Sender# [17:2735:41] Status# ERROR ClientId# [17:2735:41] ServerId# [0:0:0] PipeClient# [17:2735:41] 2025-11-03T14:01:07.583546Z 18 00h00m00.000000s :BS_NODE DEBUG: [18] ClientConnected Sender# [18:2736:41] Status# ERROR ClientId# [18:2736:41] ServerId# [0:0:0] PipeClient# [18:2736:41] 2025-11-03T14:01:07.583555Z 19 00h00m00.000000s :BS_NODE DEBUG: [19] ClientConnected Sender# [19:2737:41] Status# ERROR 
ClientId# [19:2737:41] ServerId# [0:0:0] PipeClient# [19:2737:41] 2025-11-03T14:01:07.583563Z 20 00h00m00.000000s :BS_NODE DEBUG: [20] ClientConnected Sender# [20:2738:41] Status# ERROR ClientId# [20:2738:41] ServerId# [0:0:0] PipeClient# [20:2738:41] 2025-11-03T14:01:07.583571Z 21 00h00m00.000000s :BS_NODE DEBUG: [21] ClientConnected Sender# [21:2739:41] Status# ERROR ClientId# [21:2739:41] ServerId# [0:0:0] PipeClient# [21:2739:41] 2025-11-03T14:01:07.583579Z 22 00h00m00.000000s :BS_NODE DEBUG: [22] ClientConnected Sender# [22:2740:41] Status# ERROR ClientId# [22:2740:41] ServerId# [0:0:0] PipeClient# [22:2740:41] 2025-11-03T14:01:07.583588Z 23 00h00m00.000000s :BS_NODE DEBUG: [23] ClientConnected Sender# [23:2741:41] Status# ERROR ClientId# [23:2741:41] ServerId# [0:0:0] PipeClient# [23:2741:41] 2025-11-03T14:01:07.583599Z 24 00h00m00.000000s :BS_NODE DEBUG: [24] ClientConnected Sender# [24:2742:41] Status# ERROR ClientId# [24:2742:41] ServerId# [0:0:0] PipeClient# [24:2742:41] 2025-11-03T14:01:07.583608Z 25 00h00m00.000000s :BS_NODE DEBUG: [25] ClientConnected Sender# [25:2743:41] Status# ERROR ClientId# [25:2743:41] ServerId# [0:0:0] PipeClient# [25:2743:41] 2025-11-03T14:01:07.583616Z 26 00h00m00.000000s :BS_NODE DEBUG: [26] ClientConnected Sender# [26:2744:41] Status# ERROR ClientId# [26:2744:41] ServerId# [0:0:0] PipeClient# [26:2744:41] 2025-11-03T14:01:07.583625Z 27 00h00m00.000000s :BS_NODE DEBUG: [27] ClientConnected Sender# [27:2745:41] Status# ERROR ClientId# [27:2745:41] ServerId# [0:0:0] PipeClient# [27:2745:41] 2025-11-03T14:01:07.583633Z 28 00h00m00.000000s :BS_NODE DEBUG: [28] ClientConnected Sender# [28:2746:41] Status# ERROR ClientId# [28:2746:41] ServerId# [0:0:0] PipeClient# [28:2746:41] 2025-11-03T14:01:07.583641Z 29 00h00m00.000000s :BS_NODE DEBUG: [29] ClientConnected Sender# [29:2747:41] Status# ERROR ClientId# [29:2747:41] ServerId# [0:0:0] PipeClient# [29:2747:41] 2025-11-03T14:01:07.583650Z 30 00h00m00.000000s :BS_NODE DEBUG: [30] ClientConnected Sender# [30:2748:41] Status# ERROR ClientId# [30:2748:41] ServerId# [0:0:0] PipeClient# [30:2748:41] 2025-11-03T14:01:07.583659Z 31 00h00m00.000000s :BS_NODE DEBUG: [31] ClientConnected Sender# [31:2749:41] Status# ERROR ClientId# [31:2749:41] ServerId# [0:0:0] PipeClient# [31:2749:41] 2025-11-03T14:01:07.583667Z 32 00h00m00.000000s :BS_NODE DEBUG: [32] ClientConnected Sender# [32:2750:41] Status# ERROR ClientId# [32:2750:41] ServerId# [0:0:0] PipeClient# [32:2750:41] 2025-11-03T14:01:07.583676Z 33 00h00m00.000000s :BS_NODE DEBUG: [33] ClientConnected Sender# [33:2751:41] Status# ERROR ClientId# [33:2751:41] ServerId# [0:0:0] PipeClient# [33:2751:41] 2025-11-03T14:01:07.583685Z 34 00h00m00.000000s :BS_NODE DEBUG: [34] ClientConnected Sender# [34:2752:41] Status# ERROR ClientId# [34:2752:41] ServerId# [0:0:0] PipeClient# [34:2752:41] 2025-11-03T14:01:07.583693Z 35 00h00m00.000000s :BS_NODE DEBUG: [35] ClientConnected Sender# [35:2753:41] Status# ERROR ClientId# [35:2753:41 ... 
25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000001:2:2:2:0] -> [80000001:3:2:2:0] 2025-11-03T14:01:08.444852Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000021:2:2:2:0] -> [80000021:3:2:2:0] 2025-11-03T14:01:08.444858Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000031:2:2:2:0] -> [80000031:3:2:2:0] 2025-11-03T14:01:08.444864Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000051:2:2:2:0] -> [80000051:3:2:2:0] 2025-11-03T14:01:08.444870Z 28 01h25m00.102560s :BS_NODE DEBUG: [28] VDiskId# [80000061:2:2:2:0] -> [80000061:3:2:2:0] 2025-11-03T14:01:08.444960Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] NodeServiceSetUpdate 2025-11-03T14:01:08.444976Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000010:2:1:0:0] -> [80000010:3:1:0:0] 2025-11-03T14:01:08.444983Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000040:2:1:0:0] -> [80000040:3:1:0:0] 2025-11-03T14:01:08.444989Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000070:2:1:0:0] -> [80000070:3:1:0:0] 2025-11-03T14:01:08.444995Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000001:2:1:1:0] -> [80000001:3:1:1:0] 2025-11-03T14:01:08.445001Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000021:2:1:1:0] -> [80000021:3:1:1:0] 2025-11-03T14:01:08.445006Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000031:2:1:1:0] -> [80000031:3:1:1:0] 2025-11-03T14:01:08.445013Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000051:2:1:1:0] -> [80000051:3:1:1:0] 2025-11-03T14:01:08.445019Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000061:2:1:1:0] -> [80000061:3:1:1:0] 2025-11-03T14:01:08.445026Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000002:1:1:2:0] -> [80000002:2:1:2:0] 2025-11-03T14:01:08.445032Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000012:1:1:2:0] -> [80000012:2:1:2:0] 2025-11-03T14:01:08.445039Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000022:1:1:2:0] -> [80000022:2:1:2:0] 2025-11-03T14:01:08.445045Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000032:1:1:2:0] -> [80000032:2:1:2:0] 2025-11-03T14:01:08.445051Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000042:1:1:2:0] -> [80000042:2:1:2:0] 2025-11-03T14:01:08.445058Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000052:1:1:2:0] -> [80000052:2:1:2:0] 2025-11-03T14:01:08.445064Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000062:1:1:2:0] -> [80000062:2:1:2:0] 2025-11-03T14:01:08.445070Z 13 01h25m00.102560s :BS_NODE DEBUG: [13] VDiskId# [80000072:1:1:2:0] -> [80000072:2:1:2:0] 2025-11-03T14:01:08.445161Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] NodeServiceSetUpdate 2025-11-03T14:01:08.445175Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000010:2:2:2:0] -> [80000010:3:2:2:0] 2025-11-03T14:01:08.445182Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000040:2:2:2:0] -> [80000040:3:2:2:0] 2025-11-03T14:01:08.445189Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000070:2:2:2:0] -> [80000070:3:2:2:0] 2025-11-03T14:01:08.445196Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000002:1:2:0:0] -> [80000002:2:2:0:0] 2025-11-03T14:01:08.445202Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000012:1:2:0:0] -> [80000012:2:2:0:0] 2025-11-03T14:01:08.445208Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000022:1:2:0:0] -> [80000022:2:2:0:0] 2025-11-03T14:01:08.445214Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000032:1:2:0:0] -> [80000032:2:2:0:0] 2025-11-03T14:01:08.445221Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] 
VDiskId# [80000042:1:2:0:0] -> [80000042:2:2:0:0] 2025-11-03T14:01:08.445228Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000052:1:2:0:0] -> [80000052:2:2:0:0] 2025-11-03T14:01:08.445234Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000062:1:2:0:0] -> [80000062:2:2:0:0] 2025-11-03T14:01:08.445241Z 31 01h25m00.102560s :BS_NODE DEBUG: [31] VDiskId# [80000072:1:2:0:0] -> [80000072:2:2:0:0] 2025-11-03T14:01:08.445309Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] NodeServiceSetUpdate 2025-11-03T14:01:08.445321Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000010:2:1:1:0] -> [80000010:3:1:1:0] 2025-11-03T14:01:08.445328Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000040:2:1:1:0] -> [80000040:3:1:1:0] 2025-11-03T14:01:08.445335Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000070:2:1:1:0] -> [80000070:3:1:1:0] 2025-11-03T14:01:08.445342Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000001:2:1:2:0] -> [80000001:3:1:2:0] 2025-11-03T14:01:08.445349Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000021:2:1:2:0] -> [80000021:3:1:2:0] 2025-11-03T14:01:08.445355Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000031:2:1:2:0] -> [80000031:3:1:2:0] 2025-11-03T14:01:08.445364Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000051:2:1:2:0] -> [80000051:3:1:2:0] 2025-11-03T14:01:08.445372Z 16 01h25m00.102560s :BS_NODE DEBUG: [16] VDiskId# [80000061:2:1:2:0] -> [80000061:3:1:2:0] 2025-11-03T14:01:08.446243Z 10 01h25m01.681560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to REPLICATING 2025-11-03T14:01:08.446359Z 10 01h25m01.729560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] status changed to REPLICATING 2025-11-03T14:01:08.446437Z 10 01h25m01.796560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to REPLICATING 2025-11-03T14:01:08.446520Z 7 01h25m01.894560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to REPLICATING 2025-11-03T14:01:08.446611Z 5 01h25m02.092560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.446718Z 5 01h25m02.531560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.446794Z 8 01h25m03.154560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to REPLICATING 2025-11-03T14:01:08.446879Z 4 01h25m03.601560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.446976Z 2 01h25m03.726560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.447060Z 4 01h25m03.987560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.447174Z 7 01h25m04.991560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to REPLICATING 2025-11-03T14:01:08.448582Z 7 01h25m05.218560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to REPLICATING 2025-11-03T14:01:08.448784Z 4 01h25m05.419560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.448894Z 2 01h25m05.443560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.448992Z 4 01h25m05.589560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to REPLICATING 2025-11-03T14:01:08.449071Z 7 01h25m05.674560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to REPLICATING 2025-11-03T14:01:08.449152Z 10 01h25m06.998560s :BS_NODE DEBUG: [10] VDiskId# [80000010:3:0:0:0] 
status changed to READY 2025-11-03T14:01:08.451848Z 1 01h25m06.999072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.451874Z 1 01h25m06.999072s :BS_NODE DEBUG: [1] VDiskId# [80000010:2:0:0:0] destroyed 2025-11-03T14:01:08.452774Z 4 01h25m12.184560s :BS_NODE DEBUG: [4] VDiskId# [80000032:2:0:2:0] status changed to READY 2025-11-03T14:01:08.455120Z 1 01h25m12.185072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.455143Z 1 01h25m12.185072s :BS_NODE DEBUG: [1] VDiskId# [80000032:1:0:2:0] destroyed 2025-11-03T14:01:08.455191Z 7 01h25m13.401560s :BS_NODE DEBUG: [7] VDiskId# [80000001:3:0:1:0] status changed to READY 2025-11-03T14:01:08.457394Z 1 01h25m13.402072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.457418Z 1 01h25m13.402072s :BS_NODE DEBUG: [1] VDiskId# [80000001:2:0:1:0] destroyed 2025-11-03T14:01:08.458443Z 5 01h25m15.053560s :BS_NODE DEBUG: [5] VDiskId# [80000052:2:0:2:0] status changed to READY 2025-11-03T14:01:08.460663Z 1 01h25m15.054072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.460686Z 1 01h25m15.054072s :BS_NODE DEBUG: [1] VDiskId# [80000052:1:0:2:0] destroyed 2025-11-03T14:01:08.460729Z 4 01h25m15.230560s :BS_NODE DEBUG: [4] VDiskId# [80000012:2:0:2:0] status changed to READY 2025-11-03T14:01:08.462885Z 1 01h25m15.231072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.462907Z 1 01h25m15.231072s :BS_NODE DEBUG: [1] VDiskId# [80000012:1:0:2:0] destroyed 2025-11-03T14:01:08.462958Z 8 01h25m18.627560s :BS_NODE DEBUG: [8] VDiskId# [80000061:3:0:1:0] status changed to READY 2025-11-03T14:01:08.465093Z 1 01h25m18.628072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.465114Z 1 01h25m18.628072s :BS_NODE DEBUG: [1] VDiskId# [80000061:2:0:1:0] destroyed 2025-11-03T14:01:08.465161Z 7 01h25m18.770560s :BS_NODE DEBUG: [7] VDiskId# [80000051:3:0:1:0] status changed to READY 2025-11-03T14:01:08.467335Z 1 01h25m18.771072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.467359Z 1 01h25m18.771072s :BS_NODE DEBUG: [1] VDiskId# [80000051:2:0:1:0] destroyed 2025-11-03T14:01:08.468273Z 2 01h25m20.482560s :BS_NODE DEBUG: [2] VDiskId# [80000042:2:0:2:0] status changed to READY 2025-11-03T14:01:08.470470Z 1 01h25m20.483072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.470492Z 1 01h25m20.483072s :BS_NODE DEBUG: [1] VDiskId# [80000042:1:0:2:0] destroyed 2025-11-03T14:01:08.470543Z 7 01h25m21.002560s :BS_NODE DEBUG: [7] VDiskId# [80000031:3:0:1:0] status changed to READY 2025-11-03T14:01:08.472703Z 1 01h25m21.003072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.472724Z 1 01h25m21.003072s :BS_NODE DEBUG: [1] VDiskId# [80000031:2:0:1:0] destroyed 2025-11-03T14:01:08.472855Z 7 01h25m23.893560s :BS_NODE DEBUG: [7] VDiskId# [80000021:3:0:1:0] status changed to READY 2025-11-03T14:01:08.474974Z 1 01h25m23.894072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.474996Z 1 01h25m23.894072s :BS_NODE DEBUG: [1] VDiskId# [80000021:2:0:1:0] destroyed 2025-11-03T14:01:08.475977Z 2 01h25m28.189560s :BS_NODE DEBUG: [2] VDiskId# [80000062:2:0:2:0] status changed to READY 2025-11-03T14:01:08.478052Z 1 01h25m28.190072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.478074Z 1 01h25m28.190072s :BS_NODE DEBUG: [1] VDiskId# [80000062:1:0:2:0] destroyed 2025-11-03T14:01:08.478118Z 10 01h25m28.287560s :BS_NODE DEBUG: [10] VDiskId# [80000070:3:0:0:0] status changed to READY 2025-11-03T14:01:08.480219Z 1 01h25m28.288072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 
2025-11-03T14:01:08.480242Z 1 01h25m28.288072s :BS_NODE DEBUG: [1] VDiskId# [80000070:2:0:0:0] destroyed 2025-11-03T14:01:08.480365Z 4 01h25m29.281560s :BS_NODE DEBUG: [4] VDiskId# [80000002:2:0:2:0] status changed to READY 2025-11-03T14:01:08.482495Z 1 01h25m29.282072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.482517Z 1 01h25m29.282072s :BS_NODE DEBUG: [1] VDiskId# [80000002:1:0:2:0] destroyed 2025-11-03T14:01:08.483704Z 4 01h25m30.143560s :BS_NODE DEBUG: [4] VDiskId# [80000022:2:0:2:0] status changed to READY 2025-11-03T14:01:08.485808Z 1 01h25m30.144072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.485832Z 1 01h25m30.144072s :BS_NODE DEBUG: [1] VDiskId# [80000022:1:0:2:0] destroyed 2025-11-03T14:01:08.485967Z 10 01h25m33.275560s :BS_NODE DEBUG: [10] VDiskId# [80000040:3:0:0:0] status changed to READY 2025-11-03T14:01:08.488047Z 1 01h25m33.276072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.488068Z 1 01h25m33.276072s :BS_NODE DEBUG: [1] VDiskId# [80000040:2:0:0:0] destroyed 2025-11-03T14:01:08.488234Z 5 01h25m34.939560s :BS_NODE DEBUG: [5] VDiskId# [80000072:2:0:2:0] status changed to READY 2025-11-03T14:01:08.490333Z 1 01h25m34.940072s :BS_NODE DEBUG: [1] NodeServiceSetUpdate 2025-11-03T14:01:08.490354Z 1 01h25m34.940072s :BS_NODE DEBUG: [1] VDiskId# [80000072:1:0:2:0] destroyed |52.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part4/ydb-library-yql-tests-sql-hybrid_file-part4 |52.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |52.4%| [TA] $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/library/compatibility/binaries/downloader/import_test >> downloader::import_test [GOOD] |52.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part5/py2_flake8 |52.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/pq/provider/ut/ydb-library-yql-providers-pq-provider-ut |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part9/ydb-library-yql-tests-sql-hybrid_file-part9 >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] |52.4%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part7/ydb-library-yql-tests-sql-dq_file-part7 |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part0/ydb-library-yql-tests-sql-hybrid_file-part0 |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part6/ydb-library-yql-tests-sql-hybrid_file-part6 |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part0/ydb-library-yql-tests-sql-dq_file-part0 |52.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> ydb-tests-stress-s3_backups-tests::import_test [GOOD] |52.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |52.4%| [TS] {RESULT} ydb/tests/sql/flake8 |52.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/stress/s3_backups/tests/import_test >> ydb-tests-stress-s3_backups-tests::import_test [GOOD] |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part4/ydb-library-yql-tests-sql-dq_file-part4 |52.4%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |52.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part19/py2_flake8 |52.4%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part3/ydb-library-yql-tests-sql-dq_file-part3 |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/workload_log |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/simple_queue |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part9/ydb-library-yql-tests-sql-dq_file-part9 >> ydb-tests-fq-http_api::import_test [GOOD] |52.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part8/ydb-library-yql-tests-sql-hybrid_file-part8 |52.4%| [TS] {RESULT} ydb/tests/functional/api/import_test |52.4%| [PK] {default-linux-x86_64, relwithdebinfo} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |52.4%| [TS] {RESULT} ydb/tests/library/compatibility/binaries/downloader/import_test >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |52.4%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/http_api/import_test >> ydb-tests-fq-http_api::import_test [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part7/ydb-library-yql-tests-sql-hybrid_file-part7 >> ydb-tests-functional-statistics::import_test [GOOD] |52.5%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/kqp/plan2svg/import_test >> ydb-tests-functional-kqp-plan2svg::import_test [GOOD] |52.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part15/ydb-library-yql-tests-sql-dq_file-part15 |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part13/ydb-library-yql-tests-sql-dq_file-part13 |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/statistics/import_test >> ydb-tests-functional-statistics::import_test [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part3/ydb-library-yql-tests-sql-hybrid_file-part3 >> ydb-tests-functional-sqs-messaging::import_test [GOOD] >> ydb-tests-functional-tpcc::import_test [GOOD] >> ydb-tests-functional-sqs-common::import_test [GOOD] |52.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |52.5%| [TS] {RESULT} ydb/tests/fq/mem_alloc/import_test |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/messaging/import_test >> ydb-tests-functional-sqs-messaging::import_test [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpcc/import_test >> ydb-tests-functional-tpcc::import_test [GOOD] |52.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/common/import_test >> ydb-tests-functional-sqs-common::import_test [GOOD] |52.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |52.5%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |52.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part5/ydb-library-yql-tests-sql-hybrid_file-part5 |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |52.5%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part11/ydb-library-yql-tests-sql-dq_file-part11 |52.5%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |52.5%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.5%| [TS] {RESULT} ydb/tests/stress/s3_backups/tests/import_test >> ydb-tests-stress-topic-tests::import_test [GOOD] |52.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnDiscover [GOOD] Test command err: RandomSeed# 17903810022892282530 readBody# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge discover maxId#[0:0:0:0:0:0:0] readBody# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100501:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100502:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100503:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100508:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100509:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge discover maxId#[100510:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge discover maxId#[100511:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge discover maxId#[100512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge discover maxId#[100513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 6 *** performing bridge discover maxId#[100514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge discover maxId#[100515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge discover maxId#[100516:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge discover maxId#[100517:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge discover maxId#[100518:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge discover 
maxId#[100519:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge discover maxId#[100520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge discover maxId#[100521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge discover maxId#[100522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge discover maxId#[100523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge discover maxId#[100524:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge discover maxId#[100525:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge discover maxId#[100526:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge discover maxId#[100527:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge discover maxId#[100528:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge discover maxId#[100529:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge discover maxId#[100530:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge discover maxId#[100531:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge discover maxId#[100532:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge discover maxId#[100533:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge discover maxId#[100534:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge discover maxId#[100535:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge discover maxId#[100536:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge discover maxId#[100537:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge discover maxId#[100538:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge discover maxId#[100539:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge discover maxId#[100540:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 1 
*** performing bridge discover maxId#[100541:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge discover maxId#[100542:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge discover maxId#[100543:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge discover maxId#[100544:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge discover maxId#[100545:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge discover maxId#[100546:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge discover maxId#[100547:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge discover maxId#[100548:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge discover maxId#[100549:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge discover maxId#[100550:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge discover maxId#[100551:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge discover maxId#[100552:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge discover maxId#[100553:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge discover maxId#[100554:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge discover maxId#[100555:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge discover maxId#[100556:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge discover maxId#[100557:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge discover maxId#[100558:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge discover maxId#[100559:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge discover maxId#[100560:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge discover maxId#[100561:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge discover maxId#[100562:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 
readBody# 1 mask1# 0 mask2# 7 mask3# 7 *** performing bridge discover maxId#[100563:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge discover maxId#[100564:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge discover maxId#[100565:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge discover maxId#[100566:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge discover maxId#[100567:2:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge discover maxId#[100568:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge discover maxId#[100569:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge discover maxId#[100570:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge discover maxId#[100571:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge discover maxId#[100572:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge discover maxId#[100573:1:1:0:0:5:0] *** reading from i# 0 *** reading from i# ... iscover maxId#[101450:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101451:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101452:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101453:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101454:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101455:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101456:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101457:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101458:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101459:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge discover maxId#[101460:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 1 *** 
performing bridge discover maxId#[101461:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge discover maxId#[101462:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge discover maxId#[101463:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge discover maxId#[101464:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge discover maxId#[101465:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge discover maxId#[101466:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge discover maxId#[101467:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge discover maxId#[101468:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge discover maxId#[101469:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge discover maxId#[101470:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge discover maxId#[101471:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge discover maxId#[101472:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge discover maxId#[101473:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge discover maxId#[101474:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge discover maxId#[101475:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge discover maxId#[101476:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge discover maxId#[101477:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge discover maxId#[101478:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge discover maxId#[101479:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge discover maxId#[101480:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge discover maxId#[101481:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge discover maxId#[101482:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 
mask1# 7 mask2# 2 mask3# 7 *** performing bridge discover maxId#[101483:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge discover maxId#[101484:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge discover maxId#[101485:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge discover maxId#[101486:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge discover maxId#[101487:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge discover maxId#[101488:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge discover maxId#[101489:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 6 *** performing bridge discover maxId#[101490:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge discover maxId#[101491:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge discover maxId#[101492:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge discover maxId#[101493:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge discover maxId#[101494:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge discover maxId#[101495:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge discover maxId#[101496:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge discover maxId#[101497:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge discover maxId#[101498:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge discover maxId#[101499:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge discover maxId#[101500:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge discover maxId#[101501:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge discover maxId#[101502:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge discover maxId#[101503:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge discover maxId#[101504:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 
*** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge discover maxId#[101505:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge discover maxId#[101506:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge discover maxId#[101507:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge discover maxId#[101508:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge discover maxId#[101509:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge discover maxId#[101510:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge discover maxId#[101511:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge discover maxId#[101512:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge discover maxId#[101513:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge discover maxId#[101514:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge discover maxId#[101515:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge discover maxId#[101516:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge discover maxId#[101517:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge discover maxId#[101518:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge discover maxId#[101519:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge discover maxId#[101520:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge discover maxId#[101521:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge discover maxId#[101522:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 readBody# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge discover maxId#[101523:3:1:0:0:5:0] *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 >> ydb-tests-functional-serverless::import_test [GOOD] >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |52.5%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/import_test |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/hybrid_file/part2/ydb-library-yql-tests-sql-hybrid_file-part2 |52.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |52.6%| [TS] 
{default-linux-x86_64, relwithdebinfo} ydb/tests/stress/topic/tests/import_test >> ydb-tests-stress-topic-tests::import_test [GOOD] |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tests/sql/dq_file/part5/ydb-library-yql-tests-sql-dq_file-part5 |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/serverless/import_test >> ydb-tests-functional-serverless::import_test [GOOD] |52.6%| [TS] {RESULT} ydb/tests/functional/statistics/import_test |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-sqs-with_quotas::import_test [GOOD] |52.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |52.6%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/import_test |52.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |52.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |52.6%| [TA] $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... results_accumulator.log} |52.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |52.6%| [TS] {RESULT} ydb/tests/fq/http_api/import_test |52.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] |52.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |52.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a >> ydb-tests-functional-postgresql::import_test [GOOD] |52.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/test-results/unittest/{meta.json ... results_accumulator.log} |52.6%| [TA] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |52.6%| [TS] {RESULT} ydb/tests/functional/serverless/import_test |52.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |52.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/postgresql/import_test >> ydb-tests-functional-postgresql::import_test [GOOD] >> ydb-tests-functional-tpc-medium::import_test [GOOD] |52.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |52.6%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/import_test >> ydb-tests-functional-canonical::import_test [GOOD] |52.7%| [TS] {RESULT} ydb/tests/functional/api/flake8 |52.7%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |52.7%| [TS] {RESULT} ydb/tests/functional/backup_collection/flake8 |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/canonical/import_test >> ydb-tests-functional-canonical::import_test [GOOD] |52.7%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.7%| [TS] {RESULT} ydb/tests/datashard/add_column/flake8 >> ydb-tests-functional-backup_collection::import_test [GOOD] >> ydb-tests-functional-suite_tests::import_test [GOOD] |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/with_direct_read/topic_direct_read_it |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/ydb-dstool |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |52.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/test-results/unittest/{meta.json ... results_accumulator.log} |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/tpc/medium/import_test >> ydb-tests-functional-tpc-medium::import_test [GOOD] |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-suite_tests::import_test [GOOD] |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |52.7%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/import_test |52.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/backup_collection/import_test >> ydb-tests-functional-backup_collection::import_test [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_bridge/unittest >> BridgeGet::PartRestorationAcrossBridgeOnRange [GOOD] Test command err: RandomSeed# 4376756697495342578 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 
mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading 
from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 0 mask2# 7 
mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 1 reverse# 1 mask1# 1 mask2# 2 mask3# 2 *** performing bridge range ... 
sk2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 6 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 5 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 0 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 1 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 2 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 
mask2# 3 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 3 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 4 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 5 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 6 *** performing bridge range *** reading from i# 0 *** 
reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 6 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 0 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 1 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 2 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 3 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 4 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 5 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 6 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 indexOnly# 0 reverse# 0 mask1# 7 mask2# 7 mask3# 7 *** performing bridge range *** reading from i# 0 *** reading from i# 1 *** reading from i# 2 |52.7%| [TS] {RESULT} ydb/tests/functional/postgresql/import_test |52.7%| [TS] {RESULT} ydb/tests/stress/topic/tests/import_test |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |52.7%| [TS] {RESULT} ydb/tests/functional/tenants/import_test |52.7%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/test-results/unittest/{meta.json ... results_accumulator.log} |52.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part8/py2_flake8 |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms >> ydb-tests-functional-hive::import_test [GOOD] |52.7%| [TS] {RESULT} ydb/tests/functional/canonical/import_test |52.7%| [TS] {RESULT} ydb/tests/functional/sqs/common/import_test |52.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |52.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/hive/import_test >> ydb-tests-functional-hive::import_test [GOOD] |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/dml/ydb-tests-datashard-dml |52.8%| [TS] {RESULT} ydb/tests/functional/backup_collection/import_test |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/copy_table/ydb-tests-datashard-copy_table |52.8%| [TS] {RESULT} ydb/tests/functional/suite_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |52.8%| [TS] {RESULT} ydb/tests/functional/tpc/medium/import_test |52.8%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part16/py2_flake8 |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/s3_backups >> test.py::test[join-full_trivial-off-ForceBlocks] >> test.py::test[join-order_of_qualified--Results] >> test.py::test[schema-user_schema_no_infer--ForceBlocks] |52.8%| [TS] {RESULT} ydb/tests/datashard/async_replication/flake8 >> test.py::test[produce-process_with_udf_rows-default.txt-Results] |52.8%| [TS] {RESULT} ydb/tests/olap/data_quotas/import_test |52.8%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/functional/scheme_tests/import_test >> ydb-tests-functional-scheme_tests::import_test [GOOD] |52.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |52.8%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |52.8%| [TS] {RESULT} ydb/tests/stress/mixedpy/tests/import_test >> test.py::test[join-three_equalities_paren--ForceBlocks] |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |52.8%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part3/py2_flake8 >> test.py::test[aggregate-subquery_aggregation--Results] >> test.py::test[action-action_eval_cluster_use--Results] >> test.py::test[hor_join-row_num_per_sect--ForceBlocks] >> test.py::test[pg-select_qstarref1-default.txt-Results] |52.8%| [TS] {RESULT} ydb/tests/functional/hive/import_test |52.8%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/import_test |52.8%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] |52.8%| [TS] {RESULT} ydb/tests/sql/lib/flake8 |52.8%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |52.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/topic_it |52.8%| [TS] {RESULT} ydb/tests/stress/topic_kafka/tests/import_test |52.9%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/join_perf/bin/join_perf |52.9%| [LD] {RESULT} $(B)/ydb/core/kqp/tools/combiner_perf/bin/combiner_perf |52.9%| [TS] {RESULT} ydb/tests/functional/scheme_tests/import_test >> test.py::test[udf-named_args_for_script_with_posargs--Results] |52.9%| [TS] {RESULT} ydb/tests/stress/node_broker/tests/flake8 |52.9%| [LD] {RESULT} $(B)/ydb/core/ymq/http/ut/ydb-core-ymq-http-ut |52.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/test-results/unittest/{meta.json ... 
results_accumulator.log} |52.9%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/import_test >> test.py::test[select-logical_ops-default.txt-Results] |52.9%| [TS] {RESULT} ydb/tests/datashard/vector_index/medium/flake8 |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload_kv >> test.py::test[key_filter-split_input_with_key_filter2--Results] [SKIPPED] >> test.py::test[join-three_equalities-off-Results] [SKIPPED] |52.9%| [TS] {RESULT} ydb/tests/stress/reconfig_state_storage_workload/tests/flake8 |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/oltp_workload |52.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::test[key_filter-utf8_with_legacy--Results] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[blocks-lazy_nonstrict_basic--ForceBlocks] |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part14/py2_flake8 |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/kafka_streams_test |52.9%| [TS] {RESULT} ydb/tests/functional/tpc/medium/tpch/flake8 |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part15/py2_flake8 |52.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |52.9%| [TS] {RESULT} ydb/tests/stress/mixedpy/tests/flake8 |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part2/py2_flake8 >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part8/py2_flake8 |52.9%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 >> test.py::test[aggregate-group_by_ru_join_agg--Results] |52.9%| [TS] {RESULT} ydb/tests/fq/restarts/import_test |52.9%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |52.9%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part10/py2_flake8 |53.0%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |53.0%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |53.0%| [TS] {RESULT} ydb/tests/fq/yds/import_test >> ActionParsingTest::ToAndFromStringAreConsistent [GOOD] >> ActionParsingTest::ActionsForQueueTest [GOOD] >> ActionParsingTest::BatchActionTest [GOOD] >> ActionParsingTest::ActionsForMessageTest [GOOD] >> ActionParsingTest::FastActionsTest [GOOD] >> HttpCountersTest::CountersAggregationTest [GOOD] >> LazyCounterTest::LazyCounterTest [GOOD] >> LazyCounterTest::AggregationLazyTest [GOOD] >> LazyCounterTest::AggregationNonLazyTest [GOOD] >> LazyCounterTest::HistogramAggregationTest [GOOD] >> MessageAttributeValidationTest::MessageAttributeValidationTest [GOOD] >> MessageBodyValidationTest::MessageBodyValidationTest [GOOD] >> MeteringCountersTest::CountersAggregationTest [GOOD] >> NameValidationTest::NameValidationTest [GOOD] >> QueueAttributes::BasicStdTest [GOOD] >> QueueAttributes::BasicFifoTest [GOOD] >> QueueAttributes::BasicClampTest [GOOD] >> QueueCountersTest::InsertCountersTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithoutFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersNonLeaderWithFolderTest [GOOD] >> QueueCountersTest::RemoveQueueCountersLeaderWithFolderTest [GOOD] >> QueueCountersTest::CountersAggregationTest [GOOD] >> QueueCountersTest::CountersAggregationCloudTest [GOOD] >> RedrivePolicy::RedrivePolicyValidationTest [GOOD] >> RedrivePolicy::RedrivePolicyToJsonTest [GOOD] >> RedrivePolicy::RedrivePolicyArnValidationTest [GOOD] >> SecureProtobufPrinterTest::MessageBody [GOOD] >> SecureProtobufPrinterTest::Tokens [GOOD] >> StringValidationTest::IsAlphaNumAndPunctuationTest 
[GOOD] >> UserCountersTest::DisableCountersTest [GOOD] >> UserCountersTest::RemoveUserCountersTest [GOOD] >> UserCountersTest::CountersAggregationTest [GOOD] |53.0%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/flake8 |53.0%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |53.0%| [TS] {RESULT} ydb/tests/olap/delete/flake8 |53.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |53.0%| [TS] {RESULT} ydb/tests/olap/s3_import/large/flake8 |53.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload_topic |53.0%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |53.0%| [TS] {RESULT} ydb/tests/solomon/reading/flake8 |53.0%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 >> ydb-tests-datashard-parametrized_queries::import_test [GOOD] |53.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ydb-core-blobstorage-ut_blobstorage |53.0%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part7/py2_flake8 |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ymq/base/ut/unittest >> UserCountersTest::CountersAggregationTest [GOOD] |53.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::test[schema-user_schema_no_infer--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_no_infer--Results] |53.0%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |53.0%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/parametrized_queries/import_test >> ydb-tests-datashard-parametrized_queries::import_test [GOOD] >> test.py::test[produce-process_with_udf_rows-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_opt-default.txt-Results] |53.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_replication/core-blobstorage-ut_blobstorage-ut_replication |53.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_cluster_balancing/ut_blobstorage-ut_cluster_balancing |53.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_restart_pdisk/blobstorage-ut_blobstorage-ut_restart_pdisk |53.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_vdisk_restart/blobstorage-ut_blobstorage-ut_vdisk_restart >> test.py::test[lineage-list_literal3-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_member_struct-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt-Results] [SKIPPED] |53.0%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |53.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |53.1%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/import_test |53.1%| [TS] {RESULT} ydb/tests/stress/topic/tests/flake8 |53.1%| [TS] {RESULT} ydb/tests/olap/data_quotas/flake8 >> test.py::test[multicluster-sort_force--Results] [SKIPPED] >> test.py::test[optimizers-aggregate_over_aggregate--Results] |53.1%| [LD] {RESULT} $(B)/ydb/apps/pgwire/pgwire |53.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/ydb-core-blobstorage-ut_blobstorage-ut_balancing |53.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |53.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part2/py2_flake8 |53.1%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |53.1%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 >> ydb-tests-fq-s3::import_test [GOOD] |53.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |53.1%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part7/py2_flake8 |53.1%| [TS] {RESULT} 
ydb/library/yql/tests/sql/solomon/py2_flake8 |53.1%| [TS] {RESULT} ydb/tests/example/flake8 |53.1%| [TS] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/import_test |53.1%| [TS] {RESULT} ydb/core/tx/columnshard/tools/visualize_portions/import_test |53.1%| [TS] {RESULT} ydb/tests/sql/import_test |53.1%| [TS] {RESULT} ydb/core/ymq/base/ut/unittest |53.1%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |53.1%| [TS] {RESULT} ydb/tests/datashard/parametrized_queries/flake8 |53.1%| [TS] {RESULT} ydb/tests/functional/security/flake8 |53.1%| [TS] {RESULT} ydb/tests/stress/node_broker/tests/import_test >> test.py::test[pg-select_qstarref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_starref1-default.txt-Results] >> test.py::test[join-order_of_qualified--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] |53.1%| [TS] {RESULT} ydb/tests/datashard/ttl/flake8 |53.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part3/py2_flake8 |53.2%| [TS] {RESULT} ydb/tests/datashard/partitioning/flake8 |53.2%| [TS] {RESULT} ydb/tests/olap/import_test |53.2%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/s3/import_test >> ydb-tests-fq-s3::import_test [GOOD] |53.2%| [TS] {RESULT} ydb/tests/datashard/copy_table/flake8 |53.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |53.2%| [TS] {RESULT} ydb/tests/functional/blobstorage/import_test |53.2%| [TS] {RESULT} ydb/tests/stress/viewer/tests/flake8 |53.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |53.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |53.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part4/py2_flake8 |53.2%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part6/py2_flake8 |53.2%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part9/py2_flake8 >> test.py::test[join-three_equalities_paren--ForceBlocks] [GOOD] >> test.py::test[join-three_equalities_paren--Results] >> test.py::test[select-exists_with_table-default.txt-ForceBlocks] >> test.py::test[join-full_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial-off-Results] [SKIPPED] >> test.py::test[join-group_compact_by--ForceBlocks] >> test.py::test[schema-user_schema_no_infer--Results] [GOOD] >> test.py::test[select-literal_negative-default.txt-ForceBlocks] |53.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_bridge/ydb-core-blobstorage-ut_blobstorage-ut_bridge |53.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |53.2%| [TS] {RESULT} ydb/tests/stress/transfer/tests/flake8 |53.2%| [TS] {RESULT} ydb/tests/fq/s3/import_test |53.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |53.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::test[action-action_eval_cluster_use--Results] [GOOD] >> test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[action-eval_on_modif_table_fail--Results] >> test.py::test[schema-select_all_inferschema_op--ForceBlocks] |53.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/source/transactions/tx_write_source_cursor.cpp |53.2%| [TS] {RESULT} ydb/tests/datashard/split_merge/flake8 |53.2%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |53.2%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |53.3%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part0/py2_flake8 
|53.3%| [TS] {RESULT} ydb/tests/library/ut/flake8 |53.3%| [TS] {RESULT} ydb/tests/datashard/select/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/import_test >> test.py::test[udf-named_args_for_script_with_posargs--Results] [GOOD] >> test.py::test[udf-two_regexps--ForceBlocks] |53.3%| [TS] {RESULT} ydb/tests/datashard/dml/flake8 |53.3%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |53.3%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |53.3%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/plans/import_test |53.3%| [LD] {RESULT} $(B)/ydb/core/tx/limiter/grouped_memory/ut/ydb-core-tx-limiter-grouped_memory-ut |53.3%| [TS] {RESULT} ydb/tests/olap/flake8 >> test.py::test[lineage-window_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-test_lmap_opts--Results] [SKIPPED] >> test.py::test[optimizers-yql-20299-default.txt-ForceBlocks] |53.3%| [TS] {RESULT} ydb/tests/stress/topic_kafka/tests/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |53.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 >> CompactionThrottler::CompactionLoad >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[key_filter-complex-default.txt-Results] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] |53.3%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/suite_tests/flake8 |53.3%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |53.4%| [TS] {RESULT} ydb/tests/stress/viewer/tests/import_test |53.4%| [TS] {RESULT} ydb/tests/functional/tpcc/flake8 |53.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part1/py2_flake8 |53.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::test[simple_columns-simple_columns_subreq_all-default.txt-Results] [GOOD] >> test.py::test[select-logical_ops-default.txt-Results] [GOOD] >> test.py::test[select-match_clause--Results] |53.4%| [TS] {RESULT} ydb/tests/olap/oom/flake8 |53.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest |53.4%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/import_test |53.4%| [LD] {RESULT} $(B)/ydb/core/fq/libs/compute/common/ut/ydb-core-fq-libs-compute-common-ut |53.4%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |53.4%| [TS] {RESULT} ydb/core/tx/columnshard/tools/visualize_portions/flake8 |53.4%| [TS] {RESULT} ydb/tests/functional/ydb_cli/import_test >> test.py::test[table_range-concat_sorted_with_key_diff--Results] |53.4%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |53.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |53.4%| [TS] {RESULT} ydb/tests/functional/statistics/flake8 |53.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part12/py2_flake8 |53.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |53.4%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |53.4%| [TS] {RESULT} 
ydb/core/viewer/tests/flake8 |53.4%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part5/py2_flake8 |53.4%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |53.4%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |53.4%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part1/py2_flake8 |53.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part0/py2_flake8 |53.5%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/kqp/plan2svg/flake8 |53.5%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 >> test.py::test[aggregate-subquery_aggregation--Results] [GOOD] >> test.py::test[ansi_idents-order_by-default.txt-Results] >> test.py::test[hor_join-row_num_per_sect--ForceBlocks] [GOOD] >> test.py::test[hor_join-row_num_per_sect--Results] |53.5%| [TS] {RESULT} ydb/tests/functional/encryption/import_test |53.5%| [TS] {RESULT} ydb/tests/functional/secrets/flake8 |53.5%| [TS] {RESULT} ydb/tests/datashard/vector_index/medium/import_test |53.5%| [TS] {RESULT} ydb/tests/stress/cdc/tests/flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |53.5%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part18/py2_flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |53.5%| [TS] {RESULT} ydb/tests/stress/s3_backups/tests/flake8 |53.5%| [TS] {RESULT} ydb/tests/stress/kafka/tests/flake8 |53.5%| [TS] {RESULT} ydb/tests/library/compatibility/binaries/downloader/flake8 |53.5%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |53.5%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |53.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |53.5%| [TS] {RESULT} ydb/tests/functional/tpcc/import_test |53.5%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |53.5%| [TS] {RESULT} ydb/tests/fq/streaming_optimize/flake8 |53.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |53.6%| [TS] {RESULT} ydb/tests/datashard/dump_restore/flake8 |53.6%| [TS] {RESULT} ydb/tests/functional/secrets/import_test |53.6%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_selfheal/ydb-core-mind-bscontroller-ut_selfheal |53.6%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |53.6%| [TS] {RESULT} ydb/tests/stress/ctas/tests/flake8 |53.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_osiris/ydb-core-blobstorage-ut_blobstorage-ut_osiris |53.6%| [TS] {RESULT} ydb/tests/datashard/secondary_index/flake8 >> test.py::test[type_v3-uuid--Results] |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part10/py2_flake8 |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part9/py2_flake8 |53.6%| [TS] {RESULT} ydb/tests/fq/common/flake8 |53.6%| [TS] {RESULT} ydb/tests/olap/load/flake8 |53.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part13/py2_flake8 |53.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |53.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_huge/ydb-core-blobstorage-ut_blobstorage-ut_huge |53.6%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |53.6%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |53.6%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |53.6%| [TS] {RESULT} ydb/library/yql/tests/sql/hybrid_file/part6/py2_flake8 |53.6%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |53.6%| 
[LD] {RESULT} $(B)/ydb/library/yql/tools/dqrun/dqrun |53.6%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |53.7%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/minidumps/import_test |53.7%| [TS] {RESULT} ydb/tests/stress/show_create/view/tests/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |53.7%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |53.7%| [TS] {RESULT} ydb/tests/olap/common/flake8 |53.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |53.7%| [TS] {RESULT} ydb/tests/sql/large/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/query_cache/import_test |53.7%| [TS] {RESULT} ydb/tests/functional/clickbench/flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/import_test |53.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part17/py2_flake8 |53.7%| [TS] {RESULT} ydb/library/yql/tests/sql/dq_file/part11/py2_flake8 |53.7%| [TS] {RESULT} ydb/tests/functional/rename/flake8 >> test.py::test[action-eval_on_modif_table_fail--Results] [GOOD] >> test.py::test[action-eval_range--Results] >> test.py::test[join-mergejoin_with_different_key_names--Results] |53.7%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |53.7%| [TS] {RESULT} ydb/tests/fq/streaming/flake8 |53.7%| [TS] {RESULT} ydb/tests/datashard/dump_restore/import_test >> test.py::test[join-three_equalities_paren--Results] [GOOD] >> test.py::test[optimizers-yql-9297_publish_ytcopy--Results] [GOOD] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] >> test.py::test[order_by-extract_members_over_sort_desc--Results] |53.8%| [TS] {RESULT} ydb/tests/sql/large/import_test |53.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_stop_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_stop_pdisk >> test.py::test[blocks-lazy_nonstrict_basic--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_basic--Results] >> test.py::test[pg-select_starref1-default.txt-Results] [GOOD] >> test.py::test[pg-table_func-default.txt-Results] >> ydb-tests-olap-scenario::import_test [GOOD] >> test.py::test[aggregate-having_cast-default.txt-Results] >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] >> test.py::test[produce-reduce_all_opt-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in--Results] |54.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/olap/scenario/import_test >> ydb-tests-olap-scenario::import_test [GOOD] |54.1%| [TS] {RESULT} ydb/tests/olap/scenario/import_test |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_scrub/ydb-core-blobstorage-ut_blobstorage-ut_scrub >> test.py::test[hor_join-row_num_per_sect--Results] [GOOD] >> test.py::test[join-filter_joined-off-ForceBlocks] >> test.py::test[join-premap_common_inner_both_sides--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_inner_both_sides--Results] |54.1%| [LD] {RESULT} $(B)/ydb/apps/etcd_proxy/etcd_proxy >> ydb-tests-datashard-add_column::import_test [GOOD] >> test.py::test[select-literal_negative-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-literal_negative-default.txt-Results] >> test.py::test[select-exists_with_table-default.txt-ForceBlocks] [GOOD] >> 
test.py::test[select-exists_with_table-default.txt-Results] |54.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp >> test.py::test[schema-select_all_inferschema_op--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op--Results] >> test.py::test[udf-two_regexps--ForceBlocks] [GOOD] >> test.py::test[udf-two_regexps--Results] |54.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/columnshard_view.cpp |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_move_pdisk/ydb-core-blobstorage-ut_blobstorage-ut_move_pdisk |54.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/events.cpp >> test.py::test[join-group_compact_by--ForceBlocks] [GOOD] >> test.py::test[join-group_compact_by--Results] |54.1%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/tests/datashard/add_column/import_test >> ydb-tests-datashard-add_column::import_test [GOOD] |54.1%| [TS] {RESULT} ydb/tests/datashard/add_column/import_test >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[table_range-partition_list_view--Results] [SKIPPED] >> test.py::test[tpch-q16-default.txt-Results] >> test.py::test[optimizers-aggregate_over_aggregate--Results] [GOOD] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] [SKIPPED] >> test.py::test[optimizers-yql-15210_sqlin--Results] [SKIPPED] >> test.py::test[order_by-assume_over_input--Results] >> test.py::test[join-full_trivial_udf_call-off-ForceBlocks] >> test.py::test[blocks-lazy_nonstrict_basic--Results] [GOOD] >> test.py::test[case-case_multi_val-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-20299-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-20299-default.txt-Results] >> test.py::test[select-match_clause--Results] [GOOD] >> test.py::test[select-swap_columns-default.txt-Results] >> test.py::test[ansi_idents-order_by-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_blob_depot_fat/blobstorage-ut_blobstorage-ut_blob_depot_fat |54.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/events.cpp |54.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a >> test.py::test[schema-select_all-row_spec_diff_sort2-ForceBlocks] |54.1%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |54.1%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_pdisk/ut_blobstorage-ut_read_only_pdisk >> test.py::test[table_range-concat_sorted_with_key_diff--Results] [GOOD] >> test.py::test[table_range-merge_non_strict--Results] >> test.py::test[action-eval_range--Results] [GOOD] >> test.py::test[action-eval_skip_take--Results] |54.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/cleanup_tables.cpp |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_statestorage/core-blobstorage-ut_blobstorage-ut_statestorage |54.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_group_reconfiguration/ut_group_reconfiguration |54.1%| [LD] {RESULT} 
$(B)/ydb/core/blobstorage/ut_blobstorage/ut_check_integrity/ut_blobstorage-ut_check_integrity |54.1%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |54.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Mirror3dc [GOOD] Test command err: RandomSeed# 17434831990419961889 Step = 0 SEND TEvPut with key [1:1:0:0:0:51943:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:51943:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:85877:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:85877:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:192081:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:192081:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:267203:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:267203:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 3 2025-11-03T14:01:08.990397Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 6 SEND TEvPut with key [1:1:6:0:0:377427:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:377427:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-11-03T14:01:09.015353Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 7 SEND TEvPut with key [1:1:7:0:0:48850:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:48850:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 8 SEND TEvPut with key [1:1:8:0:0:411812:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:411812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 9 SEND TEvPut with key [1:1:9:0:0:293766:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:293766:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Start node 3 Step = 10 SEND TEvPut with key [1:1:10:0:0:127358:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:127358:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 11 SEND TEvPut with key [1:1:11:0:0:282945:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:282945:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 12 SEND TEvPut with key [1:1:12:0:0:34864:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:34864:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 13 SEND TEvPut with key [1:1:13:0:0:363096:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:363096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 15 SEND TEvPut with key [1:1:15:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 16 SEND TEvPut with key [1:1:16:0:0:136892:0] 
TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:136892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 17 SEND TEvPut with key [1:1:17:0:0:517733:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:517733:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 18 SEND TEvPut with key [1:1:18:0:0:250802:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:250802:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 19 SEND TEvPut with key [1:1:19:0:0:199490:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:199490:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 20 SEND TEvPut with key [1:1:20:0:0:244269:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:244269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 21 SEND TEvPut with key [1:1:21:0:0:329606:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:329606:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 23 SEND TEvPut with key [1:1:23:0:0:519258:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:519258:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 25 SEND TEvPut with key [1:1:25:0:0:514591:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:514591:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Stop node 7 2025-11-03T14:01:09.122283Z 1 00h01m30.100512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 26 SEND TEvPut with key [1:1:26:0:0:5927:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:5927:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 28 SEND TEvPut with key [1:1:28:0:0:6043:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:6043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 30 SEND TEvPut with key [1:1:30:0:0:264716:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:264716:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Compact vdisk 3 Step = 31 SEND TEvPut with key [1:1:31:0:0:168116:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:168116:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 32 SEND TEvPut with key [1:1:32:0:0:444749:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:444749:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 33 SEND TEvPut with key [1:1:33:0:0:350254:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:350254:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 34 SEND TEvPut with key [1:1:34:0:0:145950:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:145950:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 35 SEND TEvPut with key [1:1:35:0:0:358611:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:358611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 36 SEND TEvPut with key 
[1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 38 SEND TEvPut with key [1:1:38:0:0:185170:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:185170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 39 SEND TEvPut with key [1:1:39:0:0:297271:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:297271:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 40 SEND TEvPut with key [1:1:40:0:0:419670:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:419670:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 41 SEND TEvPut with key [1:1:41:0:0:218956:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:218956:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 42 SEND TEvPut with key [1:1:42:0:0:154723:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:154723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 43 SEND TEvPut with key [1:1:43:0:0:13332:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:13332:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 44 SEND TEvPut with key [1:1:44:0:0:448892:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:448892:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 45 SEND TEvPut with key [1:1:45:0:0:103231:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:103231:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 46 SEND TEvPut with key [1:1:46:0:0:295973:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:295973:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 47 SEND TEvPut with key [1:1:47:0:0:402799:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:402799:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 48 SEND TEvPut with key [1:1:48:0:0:165045:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:165045:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 49 SEND TEvPut with key [1:1:49:0:0:360099:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:360099:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 50 SEND TEvPut with key [1:1:50:0:0:97222:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:97222:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 51 SEND TEvPut with key [1:1:51:0:0:303396:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:303396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 52 SEND TEvPut with key [1:1:52:0:0:304876:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:304876:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 53 SEND TEvPut with key [1:1:53:0:0:375063:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:375063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Start node 4 Step = 54 SEND TEvPut with key [1:1:54:0:0:288044:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:288044:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 55 SEND TEvPut with key [1:1:55:0:0:181559:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:181559:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.99989} Step = 57 SEND TEvPut with key [1:1:57:0:0:424399:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:424399:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Step = 58 SEND TEvPut with key [1:1:58:0:0:169341:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:169341:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 59 SEND TEvPut with key [1:1:59:0:0:405932:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:405932:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999915} Step = 60 SEND TEvPut with key [1:1:60:0:0:190148:0] TEvPutResult: TEvPutResult {Id# [1:1:60:0:0:190148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999927} Stop node 3 2025-11-03T14:01:09.274385Z 1 00h02m00.150512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Wipe node 0 2025-11-03T14:01:09.292196Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:09.292420Z 1 00h02m10.161024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 2644218589274191495] 2025-11-03T14:01:09.301228Z 1 00h02m15.161024s :BS_PROXY ERROR: StateEstablishingSessions Wakeup TIMEOUT Marker# DSP12 GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3 Step = 61 SEND TEvPut with key [1:1:61:0:0:500240:0] TEvPutResult: TEvPutResult {Id# [1:1:61:0:0:500240:0] Status# ERROR StatusFlags# { } ErrorReason# "Timeout while establishing sessions (DSPE4). GroupId# 2181038080 EstablishingSessionsStateTs# 130.161024s NumUnconnectedDisks# 3" ApproximateFreeSpaceShare# 0} Step = 62 SEND TEvPut with key [1:1:62:0:0:354994:0] TEvPutResult: ... 
1:1:945:0:0:76599:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Compact vdisk 2 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 948 SEND TEvPut with key [1:1:948:0:0:112126:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:112126:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 949 SEND TEvPut with key [1:1:949:0:0:525378:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:525378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 950 SEND TEvPut with key [1:1:950:0:0:410875:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:410875:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 951 SEND TEvPut with key [1:1:951:0:0:113503:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:113503:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 952 SEND TEvPut with key [1:1:952:0:0:431140:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:431140:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 953 SEND TEvPut with key [1:1:953:0:0:509293:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:509293:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 3 2025-11-03T14:01:20.203001Z 1 00h25m01.113584s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:286395:0] TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:286395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 1 2025-11-03T14:01:20.248918Z 1 00h25m11.114096s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 955 SEND TEvPut with key [1:1:955:0:0:219270:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:219270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Start node 1 Step = 956 SEND TEvPut with key [1:1:956:0:0:274971:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:274971:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999792} Step = 957 SEND TEvPut with key [1:1:957:0:0:487884:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:487884:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 3 Step = 958 SEND TEvPut with key [1:1:958:0:0:327302:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:327302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 961 SEND TEvPut with key [1:1:961:0:0:61147:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:61147:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 962 SEND TEvPut with key [1:1:962:0:0:237906:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:237906:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 963 SEND TEvPut with key [1:1:963:0:0:347273:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:347273:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 964 SEND TEvPut with 
key [1:1:964:0:0:181317:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:181317:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 965 SEND TEvPut with key [1:1:965:0:0:456096:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:456096:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 966 SEND TEvPut with key [1:1:966:0:0:93776:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:93776:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 967 SEND TEvPut with key [1:1:967:0:0:447659:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:447659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 969 SEND TEvPut with key [1:1:969:0:0:92781:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:92781:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999805} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Stop node 0 2025-11-03T14:01:20.460916Z 9 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [9:116240:351] ServerId# [1:117283:176] TabletId# 72057594037932033 PipeClientId# [9:116240:351] 2025-11-03T14:01:20.460969Z 8 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [8:146847:17] ServerId# [1:146857:4101] TabletId# 72057594037932033 PipeClientId# [8:146847:17] 2025-11-03T14:01:20.460994Z 7 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:145797:17] ServerId# [1:145807:3973] TabletId# 72057594037932033 PipeClientId# [7:145797:17] 2025-11-03T14:01:20.461036Z 6 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:122888:17] ServerId# [1:122895:1015] TabletId# 72057594037932033 PipeClientId# [6:122888:17] 2025-11-03T14:01:20.461066Z 5 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:142889:17] ServerId# [1:142898:3593] TabletId# 72057594037932033 PipeClientId# [5:142889:17] 2025-11-03T14:01:20.461086Z 4 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:151818:17] ServerId# [1:151828:4701] TabletId# 72057594037932033 PipeClientId# [4:151818:17] 2025-11-03T14:01:20.461118Z 3 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:141793:17] ServerId# [1:141803:3468] TabletId# 72057594037932033 PipeClientId# [3:141793:17] 2025-11-03T14:01:20.461164Z 2 00h25m41.151024s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:150854:17] ServerId# [1:150861:4592] TabletId# 72057594037932033 PipeClientId# [2:150854:17] Step = 971 SEND TEvPut with key [1:1:971:0:0:439384:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:439384:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 972 SEND TEvPut with key [1:1:972:0:0:252551:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:252551:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 973 SEND TEvPut with key [1:1:973:0:0:39982:0] 
TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:39982:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Stop node 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:526796:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:526796:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Start node 0 Step = 975 SEND TEvPut with key [1:1:975:0:0:337763:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:337763:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:475740:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:475740:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 977 SEND TEvPut with key [1:1:977:0:0:169780:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:169780:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 980 SEND TEvPut with key [1:1:980:0:0:159890:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:159890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 981 SEND TEvPut with key [1:1:981:0:0:111300:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:111300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 982 SEND TEvPut with key [1:1:982:0:0:355914:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:355914:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 983 SEND TEvPut with key [1:1:983:0:0:399106:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:399106:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 985 SEND TEvPut with key [1:1:985:0:0:261994:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:261994:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Step = 987 SEND TEvPut with key [1:1:987:0:0:138774:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:138774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 988 SEND TEvPut with key [1:1:988:0:0:441913:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:441913:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 989 SEND TEvPut with key [1:1:989:0:0:134469:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:134469:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 990 SEND TEvPut with key [1:1:990:0:0:123825:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:123825:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 991 SEND TEvPut with key [1:1:991:0:0:40387:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:40387:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 7 2025-11-03T14:01:20.951255Z 1 00h26m21.153072s :PIPE_SERVER 
ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 993 SEND TEvPut with key [1:1:993:0:0:455894:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:455894:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Compact vdisk 0 Step = 994 SEND TEvPut with key [1:1:994:0:0:54378:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:54378:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Compact vdisk 6 Step = 995 SEND TEvPut with key [1:1:995:0:0:487669:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:487669:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999829} Step = 996 SEND TEvPut with key [1:1:996:0:0:194641:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:194641:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 997 SEND TEvPut with key [1:1:997:0:0:74188:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:74188:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 998 SEND TEvPut with key [1:1:998:0:0:136082:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:136082:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Step = 999 SEND TEvPut with key [1:1:999:0:0:145518:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:145518:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Starting nodes Start compaction 1 Start checking |54.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |54.2%| [LD] {RESULT} $(B)/ydb/library/yql/tools/dq/worker_node/worker_node |54.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a >> test.py::test[type_v3-uuid--Results] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] >> test.py::test[select-literal_negative-default.txt-Results] [GOOD] >> test.py::test[select-optional_pull--ForceBlocks] >> test.py::test[select-exists_with_table-default.txt-Results] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-ForceBlocks] >> test.py::test[key_filter-complex-default.txt-Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] |54.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp |54.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/operations/slice_builder/builder.cpp >> test.py::test[udf-two_regexps--Results] [GOOD] >> test.py::test[udf-udaf--ForceBlocks] >> test.py::test[schema-select_all_inferschema_op--Results] [GOOD] >> test.py::test[schema-user_schema_patch_columns--ForceBlocks] |54.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |54.2%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |54.2%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a >> test.py::test[join-premap_common_inner_both_sides--Results] [GOOD] >> test.py::test[join-pullup_extra_columns--ForceBlocks] >> test.py::test[blocks-combine_all_min_filter_opt--ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] >> test.py::test[optimizers-yql-20299-default.txt-Results] [GOOD] >> test.py::test[order_by-literal_with_assume--ForceBlocks] >> test.py::test[key_filter-is_null_or_data--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_or_data--Results] >> 
test.py::test[lambda-lambda_use_labmda_as_arg-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit--Results] [SKIPPED] >> test.py::test[limit-limit_over_sort_desc_in_subquery--Results] [SKIPPED] >> test.py::test[lineage-reduce-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_group_by_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_union_all-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_no_aggregate_split--Results] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] >> test.py::test[join-group_compact_by--Results] [GOOD] >> test.py::test[join-inner_all--ForceBlocks] >> test.py::test[aggregate-having_cast-default.txt-Results] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_ru_join_agg--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] >> VDiskBalancing::TestRandom_Block42 [GOOD] >> test.py::test[order_by-extract_members_over_sort_desc--Results] [GOOD] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr--Results] >> test.py::test[join-filter_joined-off-ForceBlocks] [GOOD] >> test.py::test[join-filter_joined-off-Results] [SKIPPED] >> test.py::test[join-from_in_front_join-off-ForceBlocks] >> test.py::test[key_filter-is_null_or_data--Results] [GOOD] >> test.py::test[lineage-some_tablerow-default.txt-ForceBlocks] >> test.py::test[lineage-some_tablerow-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-some_tablerow-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-externaltx-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-externaltx-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] >> test.py::test[order_by-assume_over_input--Results] [GOOD] >> test.py::test[order_by-literal_take_zero_sort--Results] >> test.py::test[select-swap_columns-default.txt-Results] [GOOD] >> test.py::test[select-unlabeled_1000--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_balancing/unittest >> VDiskBalancing::TestRandom_Block42 [GOOD] Test command err: RandomSeed# 17497473526600265312 Step = 0 SEND TEvPut with key [1:1:0:0:0:585447:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:585447:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 1 SEND TEvPut with key [1:1:1:0:0:37868:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:37868:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 2 SEND TEvPut with key [1:1:2:0:0:619381:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:619381:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 3 SEND TEvPut with key [1:1:3:0:0:725585:0] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:725585:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Step = 4 SEND TEvPut with key [1:1:4:0:0:2934723:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:2934723:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Stop node 4 2025-11-03T14:00:59.318001Z 1 00h01m00.010512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 5 SEND TEvPut with key [1:1:5:0:0:502135:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:502135:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Step = 6 SEND 
TEvPut with key [1:1:6:0:0:3044947:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:3044947:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} Stop node 7 2025-11-03T14:00:59.344513Z 1 00h01m10.060512s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 7 SEND TEvPut with key [1:1:7:0:0:582354:0] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:582354:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 8 SEND TEvPut with key [1:1:8:0:0:1478820:0] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:1478820:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Step = 9 SEND TEvPut with key [1:1:9:0:0:1360774:0] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:1360774:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} Start node 4 Step = 10 SEND TEvPut with key [1:1:10:0:0:1727870:0] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:1727870:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 11 SEND TEvPut with key [1:1:11:0:0:1883457:0] TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:1883457:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 12 SEND TEvPut with key [1:1:12:0:0:568368:0] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:568368:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 13 SEND TEvPut with key [1:1:13:0:0:896600:0] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:896600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 14 SEND TEvPut with key [1:1:14:0:0:179270:0] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:179270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 15 SEND TEvPut with key [1:1:15:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:15:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 16 SEND TEvPut with key [1:1:16:0:0:670396:0] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:670396:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 17 SEND TEvPut with key [1:1:17:0:0:1584741:0] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:1584741:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 18 SEND TEvPut with key [1:1:18:0:0:2384818:0] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:2384818:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 19 SEND TEvPut with key [1:1:19:0:0:2867010:0] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:2867010:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 20 SEND TEvPut with key [1:1:20:0:0:2911789:0] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:2911789:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 21 SEND TEvPut with key [1:1:21:0:0:2463622:0] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:2463622:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 22 SEND TEvPut with key [1:1:22:0:0:322338:0] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:322338:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 23 SEND TEvPut with key [1:1:23:0:0:2119770:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:2119770:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 24 SEND TEvPut with key [1:1:24:0:0:56036:0] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:56036:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999939} Step = 25 SEND TEvPut with key [1:1:25:0:0:2648607:0] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:2648607:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999939} Stop node 0 2025-11-03T14:00:59.553340Z 3 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [3:194:17] ServerId# [1:304:64] TabletId# 72057594037932033 PipeClientId# [3:194:17] 2025-11-03T14:00:59.553400Z 6 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:215:17] ServerId# [1:307:67] TabletId# 72057594037932033 PipeClientId# [6:215:17] 2025-11-03T14:00:59.553425Z 5 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [5:7685:16] ServerId# [1:7694:1099] TabletId# 72057594037932033 PipeClientId# [5:7685:16] 2025-11-03T14:00:59.553451Z 4 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [4:201:17] ServerId# [1:305:65] TabletId# 72057594037932033 PipeClientId# [4:201:17] 2025-11-03T14:00:59.553475Z 2 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:187:17] ServerId# [1:303:63] TabletId# 72057594037932033 PipeClientId# [2:187:17] 2025-11-03T14:00:59.553518Z 7 00h01m30.100512s :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [7:222:17] ServerId# [1:308:68] TabletId# 72057594037932033 PipeClientId# [7:222:17] Step = 26 SEND TEvPut with key [1:1:26:0:0:539431:0] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:539431:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 27 SEND TEvPut with key [1:1:27:0:0:148482:0] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:148482:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 28 SEND TEvPut with key [1:1:28:0:0:2673563:0] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:2673563:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 29 SEND TEvPut with key [1:1:29:0:0:265170:0] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:265170:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 30 SEND TEvPut with key [1:1:30:0:0:2398732:0] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:2398732:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Compact vdisk 2 Step = 31 SEND TEvPut with key [1:1:31:0:0:2302132:0] TEvPutResult: TEvPutResult {Id# [1:1:31:0:0:2302132:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 32 SEND TEvPut with key [1:1:32:0:0:3112269:0] TEvPutResult: TEvPutResult {Id# [1:1:32:0:0:3112269:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 33 SEND TEvPut with key [1:1:33:0:0:883758:0] TEvPutResult: TEvPutResult {Id# [1:1:33:0:0:883758:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 34 SEND TEvPut with key [1:1:34:0:0:1212958:0] TEvPutResult: TEvPutResult {Id# [1:1:34:0:0:1212958:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 35 SEND TEvPut with key [1:1:35:0:0:3026131:0] TEvPutResult: TEvPutResult {Id# [1:1:35:0:0:3026131:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 36 SEND TEvPut with key [1:1:36:0:0:139148:0] TEvPutResult: TEvPutResult {Id# [1:1:36:0:0:139148:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 37 SEND TEvPut with key [1:1:37:0:0:200198:0] TEvPutResult: TEvPutResult {Id# [1:1:37:0:0:200198:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 38 SEND TEvPut with key 
[1:1:38:0:0:1252178:0] TEvPutResult: TEvPutResult {Id# [1:1:38:0:0:1252178:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 39 SEND TEvPut with key [1:1:39:0:0:1897783:0] TEvPutResult: TEvPutResult {Id# [1:1:39:0:0:1897783:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 40 SEND TEvPut with key [1:1:40:0:0:1486678:0] TEvPutResult: TEvPutResult {Id# [1:1:40:0:0:1486678:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 41 SEND TEvPut with key [1:1:41:0:0:1285964:0] TEvPutResult: TEvPutResult {Id# [1:1:41:0:0:1285964:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 42 SEND TEvPut with key [1:1:42:0:0:1221731:0] TEvPutResult: TEvPutResult {Id# [1:1:42:0:0:1221731:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 43 SEND TEvPut with key [1:1:43:0:0:1613844:0] TEvPutResult: TEvPutResult {Id# [1:1:43:0:0:1613844:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 44 SEND TEvPut with key [1:1:44:0:0:2582908:0] TEvPutResult: TEvPutResult {Id# [1:1:44:0:0:2582908:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 45 SEND TEvPut with key [1:1:45:0:0:1703743:0] TEvPutResult: TEvPutResult {Id# [1:1:45:0:0:1703743:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 46 SEND TEvPut with key [1:1:46:0:0:1362981:0] TEvPutResult: TEvPutResult {Id# [1:1:46:0:0:1362981:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 47 SEND TEvPut with key [1:1:47:0:0:1469807:0] TEvPutResult: TEvPutResult {Id# [1:1:47:0:0:1469807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 48 SEND TEvPut with key [1:1:48:0:0:2832565:0] TEvPutResult: TEvPutResult {Id# [1:1:48:0:0:2832565:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 49 SEND TEvPut with key [1:1:49:0:0:1960611:0] TEvPutResult: TEvPutResult {Id# [1:1:49:0:0:1960611:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 50 SEND TEvPut with key [1:1:50:0:0:1164230:0] TEvPutResult: TEvPutResult {Id# [1:1:50:0:0:1164230:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 51 SEND TEvPut with key [1:1:51:0:0:836900:0] TEvPutResult: TEvPutResult {Id# [1:1:51:0:0:836900:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 52 SEND TEvPut with key [1:1:52:0:0:838380:0] TEvPutResult: TEvPutResult {Id# [1:1:52:0:0:838380:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Step = 53 SEND TEvPut with key [1:1:53:0:0:1975575:0] TEvPutResult: TEvPutResult {Id# [1:1:53:0:0:1975575:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99989} Start node 0 Step = 54 SEND TEvPut with key [1:1:54:0:0:1888556:0] TEvPutResult: TEvPutResult {Id# [1:1:54:0:0:1888556:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 55 SEND TEvPut with key [1:1:55:0:0:715063:0] TEvPutResult: TEvPutResult {Id# [1:1:55:0:0:715063:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 56 SEND TEvPut with key [1:1:56:0:0:42993:0] TEvPutResult: TEvPutResult {Id# [1:1:56:0:0:42993:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 57 SEND TEvPut with key [1:1:57:0:0:1491407:0] TEvPutResult: TEvPutResult {Id# [1:1:57:0:0:1491407:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 58 SEND TEvPut with key [1:1:58:0:0:702845:0] TEvPutResult: TEvPutResult {Id# [1:1:58:0:0:702845:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999817} Step = 59 SEND TEvPut with key [1:1:59:0:0:2539948:0] TEvPutResult: TEvPutResult {Id# [1:1:59:0:0:2539948:0] Statu ... 7} Step = 936 SEND TEvPut with key [1:1:936:0:0:2748248:0] TEvPutResult: TEvPutResult {Id# [1:1:936:0:0:2748248:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 937 SEND TEvPut with key [1:1:937:0:0:112302:0] TEvPutResult: TEvPutResult {Id# [1:1:937:0:0:112302:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 938 SEND TEvPut with key [1:1:938:0:0:800417:0] TEvPutResult: TEvPutResult {Id# [1:1:938:0:0:800417:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 939 SEND TEvPut with key [1:1:939:0:0:2336442:0] TEvPutResult: TEvPutResult {Id# [1:1:939:0:0:2336442:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 940 SEND TEvPut with key [1:1:940:0:0:982070:0] TEvPutResult: TEvPutResult {Id# [1:1:940:0:0:982070:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999695} Start node 4 Step = 941 SEND TEvPut with key [1:1:941:0:0:713632:0] TEvPutResult: TEvPutResult {Id# [1:1:941:0:0:713632:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 942 SEND TEvPut with key [1:1:942:0:0:1644191:0] TEvPutResult: TEvPutResult {Id# [1:1:942:0:0:1644191:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 943 SEND TEvPut with key [1:1:943:0:0:254634:0] TEvPutResult: TEvPutResult {Id# [1:1:943:0:0:254634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 944 SEND TEvPut with key [1:1:944:0:0:1141270:0] TEvPutResult: TEvPutResult {Id# [1:1:944:0:0:1141270:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Step = 945 SEND TEvPut with key [1:1:945:0:0:610103:0] TEvPutResult: TEvPutResult {Id# [1:1:945:0:0:610103:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Step = 946 SEND TEvPut with key [1:1:946:0:0:24822:0] TEvPutResult: TEvPutResult {Id# [1:1:946:0:0:24822:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999707} Compact vdisk 6 Step = 947 SEND TEvPut with key [1:1:947:0:0:100167:0] TEvPutResult: TEvPutResult {Id# [1:1:947:0:0:100167:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 948 SEND TEvPut with key [1:1:948:0:0:645630:0] TEvPutResult: TEvPutResult {Id# [1:1:948:0:0:645630:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 949 SEND TEvPut with key [1:1:949:0:0:2125890:0] TEvPutResult: TEvPutResult {Id# [1:1:949:0:0:2125890:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 950 SEND TEvPut with key [1:1:950:0:0:2544891:0] TEvPutResult: TEvPutResult {Id# [1:1:950:0:0:2544891:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 951 SEND TEvPut with key [1:1:951:0:0:647007:0] TEvPutResult: TEvPutResult {Id# [1:1:951:0:0:647007:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 952 SEND TEvPut with key [1:1:952:0:0:2031652:0] TEvPutResult: TEvPutResult {Id# [1:1:952:0:0:2031652:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Step = 953 SEND TEvPut with key [1:1:953:0:0:2109805:0] TEvPutResult: TEvPutResult {Id# [1:1:953:0:0:2109805:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999585} Stop node 3 2025-11-03T14:01:21.758531Z 1 00h25m30.785520s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 4 Step = 954 SEND TEvPut with key [1:1:954:0:0:1353403:0] 
TEvPutResult: TEvPutResult {Id# [1:1:954:0:0:1353403:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999744} Stop node 4 2025-11-03T14:01:21.818101Z 1 00h25m40.787056s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 5 Step = 955 SEND TEvPut with key [1:1:955:0:0:1286278:0] TEvPutResult: TEvPutResult {Id# [1:1:955:0:0:1286278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Start node 3 Step = 956 SEND TEvPut with key [1:1:956:0:0:1875483:0] TEvPutResult: TEvPutResult {Id# [1:1:956:0:0:1875483:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 957 SEND TEvPut with key [1:1:957:0:0:1021388:0] TEvPutResult: TEvPutResult {Id# [1:1:957:0:0:1021388:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999683} Start node 4 Step = 958 SEND TEvPut with key [1:1:958:0:0:860806:0] TEvPutResult: TEvPutResult {Id# [1:1:958:0:0:860806:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 959 SEND TEvPut with key [1:1:959:0:0:385917:0] TEvPutResult: TEvPutResult {Id# [1:1:959:0:0:385917:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 960 SEND TEvPut with key [1:1:960:0:0:200998:0] TEvPutResult: TEvPutResult {Id# [1:1:960:0:0:200998:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 961 SEND TEvPut with key [1:1:961:0:0:1661659:0] TEvPutResult: TEvPutResult {Id# [1:1:961:0:0:1661659:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 962 SEND TEvPut with key [1:1:962:0:0:771410:0] TEvPutResult: TEvPutResult {Id# [1:1:962:0:0:771410:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 963 SEND TEvPut with key [1:1:963:0:0:1414281:0] TEvPutResult: TEvPutResult {Id# [1:1:963:0:0:1414281:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 964 SEND TEvPut with key [1:1:964:0:0:2848837:0] TEvPutResult: TEvPutResult {Id# [1:1:964:0:0:2848837:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999756} Step = 965 SEND TEvPut with key [1:1:965:0:0:989600:0] TEvPutResult: TEvPutResult {Id# [1:1:965:0:0:989600:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999731} Step = 966 SEND TEvPut with key [1:1:966:0:0:2761296:0] TEvPutResult: TEvPutResult {Id# [1:1:966:0:0:2761296:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 967 SEND TEvPut with key [1:1:967:0:0:981163:0] TEvPutResult: TEvPutResult {Id# [1:1:967:0:0:981163:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 968 SEND TEvPut with key [1:1:968:0:0:14298:0] TEvPutResult: TEvPutResult {Id# [1:1:968:0:0:14298:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 969 SEND TEvPut with key [1:1:969:0:0:626285:0] TEvPutResult: TEvPutResult {Id# [1:1:969:0:0:626285:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Step = 970 SEND TEvPut with key [1:1:970:0:0:334566:0] TEvPutResult: TEvPutResult {Id# [1:1:970:0:0:334566:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.99967} Stop node 7 2025-11-03T14:01:22.068261Z 1 00h26m10.788592s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 8 Step = 971 SEND TEvPut with key [1:1:971:0:0:972888:0] TEvPutResult: TEvPutResult {Id# [1:1:971:0:0:972888:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step = 972 SEND TEvPut with key [1:1:972:0:0:786055:0] TEvPutResult: TEvPutResult {Id# [1:1:972:0:0:786055:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Step 
= 973 SEND TEvPut with key [1:1:973:0:0:2707502:0] TEvPutResult: TEvPutResult {Id# [1:1:973:0:0:2707502:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999646} Stop node 1 2025-11-03T14:01:22.128604Z 1 00h26m20.803072s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 974 SEND TEvPut with key [1:1:974:0:0:2660812:0] TEvPutResult: TEvPutResult {Id# [1:1:974:0:0:2660812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Start node 1 Step = 975 SEND TEvPut with key [1:1:975:0:0:3005283:0] TEvPutResult: TEvPutResult {Id# [1:1:975:0:0:3005283:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999719} Stop node 1 2025-11-03T14:01:22.237629Z 1 00h26m40.811536s :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 Step = 976 SEND TEvPut with key [1:1:976:0:0:1542748:0] TEvPutResult: TEvPutResult {Id# [1:1:976:0:0:1542748:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 977 SEND TEvPut with key [1:1:977:0:0:2837300:0] TEvPutResult: TEvPutResult {Id# [1:1:977:0:0:2837300:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 978 SEND TEvPut with key [1:1:978:0:0:481535:0] TEvPutResult: TEvPutResult {Id# [1:1:978:0:0:481535:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 979 SEND TEvPut with key [1:1:979:0:0:24668:0] TEvPutResult: TEvPutResult {Id# [1:1:979:0:0:24668:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 980 SEND TEvPut with key [1:1:980:0:0:1760402:0] TEvPutResult: TEvPutResult {Id# [1:1:980:0:0:1760402:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 981 SEND TEvPut with key [1:1:981:0:0:1711812:0] TEvPutResult: TEvPutResult {Id# [1:1:981:0:0:1711812:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 982 SEND TEvPut with key [1:1:982:0:0:1422922:0] TEvPutResult: TEvPutResult {Id# [1:1:982:0:0:1422922:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 983 SEND TEvPut with key [1:1:983:0:0:2533122:0] TEvPutResult: TEvPutResult {Id# [1:1:983:0:0:2533122:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 984 SEND TEvPut with key [1:1:984:0:0:347759:0] TEvPutResult: TEvPutResult {Id# [1:1:984:0:0:347759:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 985 SEND TEvPut with key [1:1:985:0:0:1862506:0] TEvPutResult: TEvPutResult {Id# [1:1:985:0:0:1862506:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 986 SEND TEvPut with key [1:1:986:0:0:101043:0] TEvPutResult: TEvPutResult {Id# [1:1:986:0:0:101043:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 987 SEND TEvPut with key [1:1:987:0:0:672278:0] TEvPutResult: TEvPutResult {Id# [1:1:987:0:0:672278:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 988 SEND TEvPut with key [1:1:988:0:0:2042425:0] TEvPutResult: TEvPutResult {Id# [1:1:988:0:0:2042425:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 989 SEND TEvPut with key [1:1:989:0:0:1201477:0] TEvPutResult: TEvPutResult {Id# [1:1:989:0:0:1201477:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 990 SEND TEvPut with key [1:1:990:0:0:1724337:0] TEvPutResult: TEvPutResult {Id# [1:1:990:0:0:1724337:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 991 SEND TEvPut with key [1:1:991:0:0:2174403:0] TEvPutResult: TEvPutResult {Id# [1:1:991:0:0:2174403:0] Status# 
OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 992 SEND TEvPut with key [1:1:992:0:0:193000:0] TEvPutResult: TEvPutResult {Id# [1:1:992:0:0:193000:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 993 SEND TEvPut with key [1:1:993:0:0:618508:0] TEvPutResult: TEvPutResult {Id# [1:1:993:0:0:618508:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 994 SEND TEvPut with key [1:1:994:0:0:2278246:0] TEvPutResult: TEvPutResult {Id# [1:1:994:0:0:2278246:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 995 SEND TEvPut with key [1:1:995:0:0:2001881:0] TEvPutResult: TEvPutResult {Id# [1:1:995:0:0:2001881:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 996 SEND TEvPut with key [1:1:996:0:0:1759634:0] TEvPutResult: TEvPutResult {Id# [1:1:996:0:0:1759634:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 997 SEND TEvPut with key [1:1:997:0:0:2469234:0] TEvPutResult: TEvPutResult {Id# [1:1:997:0:0:2469234:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 998 SEND TEvPut with key [1:1:998:0:0:1329395:0] TEvPutResult: TEvPutResult {Id# [1:1:998:0:0:1329395:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Step = 999 SEND TEvPut with key [1:1:999:0:0:1243807:0] TEvPutResult: TEvPutResult {Id# [1:1:999:0:0:1243807:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999634} Starting nodes Start compaction 1 Start checking >> test.py::test[produce-reduce_multi_in--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_keytuple--Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] >> test.py::test[case-case_multi_val-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_multi_val-default.txt-Results] |54.2%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... results_accumulator.log} |54.2%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_balancing/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] >> test.py::test[join-mergejoin_with_different_key_names--Results] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--ForceBlocks] >> test.py::test[join-full_trivial_udf_call-off-ForceBlocks] [GOOD] >> test.py::test[join-full_trivial_udf_call-off-Results] [SKIPPED] >> test.py::test[join-grace_join1--ForceBlocks] >> test.py::test[action-eval_skip_take--Results] [GOOD] >> test.py::test[action-insert_after_eval--Results] >> test.py::test[optimizers-length_over_merge--ForceBlocks] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-ForceBlocks] [GOOD] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] >> test.py::test[table_range-merge_non_strict--Results] [GOOD] >> test.py::test[tpch-q17-default.txt-Results] >> test.py::test[schema-user_schema_patch_columns--ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_patch_columns--Results] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] >> test.py::test[schema-select_all-row_spec_diff_sort2-Results] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--ForceBlocks] >> test.py::test[select-optional_pull--ForceBlocks] [GOOD] >> test.py::test[select-optional_pull--Results] >> test.py::test[blocks-combine_all_min_filter_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] >> test.py::test[case-case_multi_val-default.txt-Results] [GOOD] >> test.py::test[case-case_then_else-default.txt-ForceBlocks] >> test.py::test[order_by-literal_with_assume--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_with_assume--Results] >> test.py::test[udf-udaf--ForceBlocks] [GOOD] >> test.py::test[udf-udaf--Results] >> test.py::test[order_by-literal--Results] >> test.py::test[join-pullup_extra_columns--ForceBlocks] [GOOD] >> test.py::test[join-pullup_extra_columns--Results] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] >> test.py::test[join-inner_all--ForceBlocks] [GOOD] >> test.py::test[join-inner_all--Results] >> test.py::test[join-yql-12022-off-ForceBlocks] >> test.py::test[seq_mode-shared_subquery_expr_after_commit-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q08-default.txt-Results] >> test.py::test[bigdate-table_arithmetic_sub-default.txt-Results] [GOOD] >> test.py::test[binding-bind_select-default.txt-Results] >> test.py::test[schema-user_schema_patch_columns--Results] [GOOD] >> test.py::test[select-host_count--ForceBlocks] >> test.py::test[pg-drop_table--ForceBlocks] >> test.py::test[order_by-literal_take_zero_sort--Results] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] |54.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |54.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |54.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/operators/ev_write/secondary.cpp |54.2%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp >> 
test.py::test[order_by-literal_with_assume--Results] [GOOD] >> test.py::test[order_by-sort_decimals--ForceBlocks] |54.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_finish_from_source.cpp |54.2%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/transactions/tx_controller.cpp >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-decimal_avg--ForceBlocks] >> test.py::test[select-optional_pull--Results] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-ForceBlocks] >> test.py::test[union_all-union_all_with_top_level_limits_ansi-default.txt-Results] [GOOD] >> test.py::test[view-file_inner_udf--ForceBlocks] [SKIPPED] >> test.py::test[view-file_inner_udf--Results] [SKIPPED] >> test.py::test[weak_field-weak_field_join--ForceBlocks] >> test.py::test[pg-table_func-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q07-default.txt-Results] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] |54.2%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |54.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a >> test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] >> test.py::test[optimizers-direct_row_after_merge--ForceBlocks] [GOOD] >> test.py::test[optimizers-direct_row_after_merge--Results] >> test.py::test[join-from_in_front_join-off-ForceBlocks] [GOOD] >> test.py::test[join-from_in_front_join-off-Results] [SKIPPED] >> test.py::test[join-grace_join1-off-ForceBlocks] |54.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |54.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a >> test.py::test[udf-udaf--Results] [GOOD] >> test.py::test[view-system_udf--ForceBlocks] |54.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |54.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a >> test.py::test[aggregate-group_by_rollup_grouping_hum_bind--Results] [GOOD] >> test.py::test[aggregate-list_with_fold_map--ForceBlocks] >> test.py::test[select-unlabeled_1000--Results] [GOOD] >> test.py::test[select_yql-statements-default.txt-Results] [SKIPPED] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] >> test.py::test[join-pullup_extra_columns--Results] [GOOD] >> test.py::test[join-pullup_null_column--ForceBlocks] >> test.py::test[tpch-q16-default.txt-Results] [GOOD] >> test.py::test[tpch-q19-default.txt-Results] >> test.py::test[key_filter-contains_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_tuples-default.txt-Results] >> test.py::test[join-inner_all--Results] [GOOD] >> test.py::test[join-join_comp_common_table--ForceBlocks] >> test.py::test[join-grace_join1--ForceBlocks] [GOOD] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-join_key_cmp_udf-off-ForceBlocks] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[lineage-reduce_all_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-insert_fill--Results] >> 
test.py::test[join-premap_common_right_tablecontent--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_right_tablecontent--Results] >> test.py::test[optimizers-test_no_aggregate_split--Results] [GOOD] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] [SKIPPED] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] >> test.py::test[multicluster-insert_fill--Results] [SKIPPED] >> test.py::test[multicluster-remote_tc_with_auto-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] >> test.py::test[optimizers-direct_row_after_merge--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] >> test.py::test[action-insert_after_eval--Results] [GOOD] >> test.py::test[action-parallel_for-default.txt-Results] >> test.py::test[produce-reduce_multi_in_keytuple--Results] [GOOD] >> test.py::test[produce-reduce_multi_in_presort--Results] >> test.py::test[optimizers-length_over_merge--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge--Results] >> test.py::test[order_by-literal--Results] [GOOD] >> test.py::test[order_by-native_desc_assume_with_transform--Results] [SKIPPED] >> test.py::test[order_by-native_desc_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] >> test.py::test[binding-bind_select-default.txt-Results] [GOOD] >> test.py::test[binding-drop_binding--Results] >> test.py::test[aggregate-percentile_interval-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-percentile_interval-default.txt-Results] >> test.py::test[case-case_then_else-default.txt-ForceBlocks] [GOOD] >> test.py::test[case-case_then_else-default.txt-Results] >> test.py::test[pg-tpcds-q07-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[blocks-sub_uint64_opt2--ForceBlocks] >> test.py::test[join-yql-12022-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-12022-off-Results] [SKIPPED] >> test.py::test[json-json_value/example--ForceBlocks] >> test.py::test[pg-drop_table--ForceBlocks] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[schema-select_all_inferschema_op_custom_tmp--Results] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] [GOOD] >> test.py::test[order_by-order_by_list_of_strings--Results] >> test.py::test[schema-select_simple-default.txt-ForceBlocks] >> test.py::test[optimizers-length_over_merge--Results] [GOOD] >> test.py::test[optimizers-nonselected_direct_row--ForceBlocks] >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-decimal--ForceBlocks] >> test.py::test[select-host_count--ForceBlocks] [GOOD] >> test.py::test[select-host_count--Results] >> test.py::test[join-premap_common_right_tablecontent--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--ForceBlocks] >> test.py::test[select-select_all_filtered-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-select_all_filtered-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_all_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] >> test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> 
test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_group-hint_dup_col_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_dup_col_fail--Results] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--ForceBlocks] [SKIPPED] >> test.py::test[column_group-insert_diff_groups3_fail--Results] [SKIPPED] >> test.py::test[column_order-ordered_plus_native--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[tpch-q18-default.txt-Results] >> test.py::test[join-grace_join1-off-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial--ForceBlocks] >> test.py::test[blocks-decimal_avg--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join--ForceBlocks] [GOOD] >> test.py::test[blocks-decimal_avg--Results] >> test.py::test[weak_field-weak_field_join--Results] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-ForceBlocks] |54.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp |54.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/data_sharing/destination/transactions/tx_data_from_source.cpp >> test.py::test[view-system_udf--ForceBlocks] [GOOD] >> test.py::test[view-system_udf--Results] >> test.py::test[flatten_by-flatten_dict_by_opt--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_join--ForceBlocks] [SKIPPED] >> test.py::test[flatten_by-flatten_expr_join--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_mode-default.txt-ForceBlocks] |54.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |54.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |54.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a >> test.py::test[join-pullup_null_column--ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> test.py::test[aggregate-list_with_fold_map--ForceBlocks] [GOOD] >> test.py::test[aggregate-list_with_fold_map--Results] >> test.py::test[binding-drop_binding--Results] [GOOD] >> test.py::test[blocks-add_uint64_opt2--Results] >> test.py::test[select-select_all_filtered-default.txt-Results] [GOOD] >> test.py::test[select-select_all_group_by_column--ForceBlocks] >> test.py::test[order_by-sort_decimals--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_decimals--Results] |54.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp |54.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/tablet/write_queue.cpp >> test.py::test[tpch-q19-default.txt-Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] |54.3%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |54.3%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |54.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a >> test.py::test[pg-tpcds-q24-default.txt-Results] >> test.py::test[aggregate-percentile_interval-default.txt-Results] [GOOD] >> 
test.py::test[aggregate-too_wide-default.txt-ForceBlocks] >> test.py::test[select-host_count--Results] [GOOD] >> test.py::test[join-join_key_cmp_udf-off-ForceBlocks] [GOOD] >> test.py::test[join-join_key_cmp_udf-off-Results] [SKIPPED] >> test.py::test[join-left_all-off-ForceBlocks] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test.py::test[pg-tpch-q08-default.txt-Results] [GOOD] >> test.py::test[pg-wide_sort--ForceBlocks] >> test.py::test[select-result_size_limit--ForceBlocks] [SKIPPED] >> test.py::test[select-result_size_limit--Results] [SKIPPED] >> test.py::test[select-scalar_subquery_with_star-default.txt-ForceBlocks] >> test.py::test[optimizers-length_over_merge_fs_multiusage--ForceBlocks] [GOOD] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field--ForceBlocks] >> test.py::test[ql_filter-integer_many_right--ForceBlocks] >> test.py::test[join-join_left_cbo--ForceBlocks] |54.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp |54.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_write_index.cpp >> test.py::test[aggregate-group_by_ru_join_grouping-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_session_aliases--Results] >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> test.py::test[blocks-json_document_type--ForceBlocks] >> test.py::test[weak_field-weak_field_join--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_subreq_all_key_without-default.txt-Results] [GOOD] >> test.py::test[window-distinct_over_window_full_frames--ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-ForceBlocks] >> test.py::test[window-distinct_over_window_full_frames--Results] >> test.py::test[action-parallel_for-default.txt-Results] [GOOD] >> test.py::test[action-pending_arg_fail--Results] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] >> test.py::test[aggregate-list_with_fold_map--Results] [GOOD] >> test.py::test[blocks-add_decimal--ForceBlocks] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[join-pullup_null_column-off-ForceBlocks] >> test.py::test[blocks-sub_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-sub_uint64_opt2--Results] >> CompactionThrottler::CompactionLoad [GOOD] >> CompDefrag::DoesItWork |54.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp |54.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_draft.cpp >> test.py::test[simple_columns-simple_columns_join_coalesce_without_1-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] >> test.py::test[order_by-sort_decimals--Results] [GOOD] >> test.py::test[order_by-sort_simple--ForceBlocks] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] [GOOD] >> test.py::test[optimizers-nonselected_direct_row--ForceBlocks] [GOOD] >> 
test.py::test[optimizers-nonselected_direct_row--Results] >> test.py::test[order_by-order_by_list_of_strings--Results] [GOOD] >> test.py::test[order_by-order_by_tablerow_column--Results] >> test.py::test[schema-select_simple-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-select_simple-default.txt-Results] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] >> test.py::test[produce-reduce_multi_in_presort--Results] [GOOD] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] >> test.py::test[pg-select_from_columns_qstar-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] >> test.py::test[join-inner_trivial--ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial--Results] >> test.py::test[blocks-sub_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-type_and_callable_stats--ForceBlocks] >> test.py::test[column_order-ordered_plus_native--ForceBlocks] [GOOD] >> test.py::test[column_order-ordered_plus_native--Results] >> test.py::test[order_by-order_by_missing_project_column-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_udf_duo--Results] >> test.py::test[action-pending_arg_fail--Results] [GOOD] >> test.py::test[aggr_factory-count-default.txt-Results] |54.3%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_remove_blobs.cpp >> test.py::test[flatten_by-flatten_mode-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] >> test.py::test[blocks-add_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-block_input_various_types--Results] [SKIPPED] >> test.py::test[blocks-coalesce_ints--Results] >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test.py::test[schema-select_simple-default.txt-Results] [GOOD] >> test.py::test[schema-skip_complex_type--ForceBlocks] >> test.py::test[optimizers-nonselected_direct_row--Results] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-ForceBlocks] >> test.py::test[select-select_all_group_by_column--ForceBlocks] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] >> test.py::test[optimizers-length_over_merge_fs_multiusage--Results] [GOOD] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-ForceBlocks] >> test.py::test[type_v3-insert_struct_v3_with_native--Results] [GOOD] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] >> test.py::test[select-scalar_subquery_with_star-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] >> test.py::test[pg-wide_sort--ForceBlocks] [GOOD] >> test.py::test[pg-wide_sort--Results] >> test.py::test[join-left_all-off-ForceBlocks] [GOOD] >> test.py::test[join-left_all-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner--ForceBlocks] >> test.py::test[join-join_comp_common_table--ForceBlocks] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> 
test.py::test[weak_field-weak_field--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field--Results] >> test.py::test[flatten_by-flatten_mode-default.txt-Results] [GOOD] >> test.py::test[column_order-ordered_plus_native--Results] [GOOD] >> test.py::test[in-in_compact_distinct-empty-ForceBlocks] >> test.py::test[json-json_value/example--ForceBlocks] [GOOD] >> test.py::test[json-json_value/example--Results] >> test.py::test[datetime-date_tz_table_sort_asc--ForceBlocks] >> test.py::test[join-pushdown_filter_over_inner_with_strict_udf--Results] [GOOD] >> test.py::test[join-simple_columns_partial--ForceBlocks] >> test.py::test[join-inner_trivial--Results] [GOOD] >> test.py::test[join-inner_trivial-off-ForceBlocks] >> test.py::test[join-join_left_cbo--ForceBlocks] [GOOD] >> test.py::test[join-join_left_cbo--Results] >> test.py::test[ql_filter-integer_many_right--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_right--Results] >> test.py::test[key_filter-decimal--ForceBlocks] [GOOD] >> test.py::test[key_filter-decimal--Results] >> test.py::test[blocks-add_decimal--ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-ForceBlocks] [GOOD] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-concat_sorted_with_key_diff--ForceBlocks] >> test.py::test[blocks-json_document_type--ForceBlocks] [GOOD] >> test.py::test[blocks-json_document_type--Results] >> test.py::test[blocks-add_decimal--Results] >> test.py::test[join-mapjoin_on_tablerecord--ForceBlocks] [SKIPPED] >> test.py::test[join-mapjoin_on_tablerecord--Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_2-default.txt-Results] [GOOD] >> test.py::test[select-select_all_group_by_column--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] >> test.py::test[select-shift_columns-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_tablerow_column--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join_condition--Results] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] >> test.py::test[select-scalar_subquery_with_star-default.txt-Results] [GOOD] >> test.py::test[join-pullup_null_column-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_null_column-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners-off-ForceBlocks] >> test.py::test[table_range-range_over_regexp--ForceBlocks] >> test.py::test[pg-wide_sort--Results] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--ForceBlocks] >> test.py::test[optimizers-yql-7767_key_filter_with_view--Results] [GOOD] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] >> test.py::test[ql_filter-integer_many_right--Results] [GOOD] >> test.py::test[ql_filter-integer_members--ForceBlocks] >> test.py::test[hor_join-yield_off--Results] [SKIPPED] >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field--Results] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--ForceBlocks] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] >> test.py::test[tpch-q18-default.txt-Results] [GOOD] >> test.py::test[type_v3-YQL-20213--Results] >> test.py::test[pg-tpcds-q15-default.txt-Results] 
[GOOD] >> test.py::test[pg-tpcds-q33-default.txt-Results] >> test.py::test[blocks-add_decimal--Results] [GOOD] >> test.py::test[blocks-block_input_per_cluster--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_per_cluster--Results] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2--Results] [SKIPPED] >> test.py::test[blocks-combine_all_count--ForceBlocks] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] >> test.py::test[aggregate-group_by_session_aliases--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct--Results] |54.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/write.cpp |54.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp >> test.py::test[window-distinct_over_window_full_frames--Results] [GOOD] >> test.py::test[window-empty/aggregations--ForceBlocks] |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/adapter.cpp >> test.py::test[order_by-order_by_udf_duo--Results] [GOOD] >> test.py::test[order_by-yql-19598--Results] >> test.py::test[blocks-json_document_type--Results] [GOOD] >> test.py::test[blocks-pg_to_strings--ForceBlocks] >> test.py::test[join-join_left_cbo--Results] [GOOD] >> test.py::test[join-join_right_cbo--ForceBlocks] >> test.py::test[pg-tpcds-q40-default.txt-Results] |54.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/changes/with_appended.cpp >> test.py::test[pg-tpcds-q37-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q37-default.txt-Results] >> test.py::test[key_filter-decimal--Results] [GOOD] >> test.py::test[lineage-reduce_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[json-json_value/example--Results] [GOOD] >> test.py::test[key_filter-between_with_key_filter--ForceBlocks] >> test.py::test[blocks-type_and_callable_stats--ForceBlocks] [GOOD] >> test.py::test[blocks-type_and_callable_stats--Results] >> test.py::test[lineage-select_field_limit_offset-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-ForceBlocks] >> test.py::test[lineage-select_join-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-ForceBlocks] |54.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |54.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a >> test.py::test[aggr_factory-count-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-variance-default.txt-Results] >> test.py::test[blocks-coalesce_ints--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--Results] >> test.py::test[lineage-select_mix_fields-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_mix_fields-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_table_row-default.txt-ForceBlocks] [SKIPPED] >> 
test.py::test[lineage-select_table_row-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[multicluster-local_tc_with_force-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-test_no_aggregate_split--ForceBlocks] >> test.py::test[order_by-sort_simple--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_simple--Results] >> test.py::test[schema-skip_complex_type--ForceBlocks] [GOOD] >> test.py::test[schema-skip_complex_type--Results] |54.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a >> test.py::test[pg-select_unionall_self-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_unionall_self-default.txt-Results] >> test.py::test[produce-process_multi_out_bad_count_fail--ForceBlocks] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] [GOOD] >> test.py::test[produce-reduce_with_python_filter_and_having--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] >> test.py::test[action-action_eval_cluster_use_compact_named_exprs--Results] [SKIPPED] >> test.py::test[weak_field-weak_field_join_condition--Results] [GOOD] >> test.py::test[window-current/aggregations_leadlag--ForceBlocks] >> test.py::test[produce-reduce_with_python_filter_and_having--Results] [SKIPPED] >> test.py::test[ql_filter-integer_many_left--ForceBlocks] >> test.py::test[action-nested_subquery--Results] >> test.py::test[produce-reduce_multi_in_sampling-sorted-Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] >> test.py::test[weak_field-weak_field_wrong_types_fail--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_wrong_types_fail--Results] [GOOD] >> test.py::test[window-generic/aggregations_include_current--ForceBlocks] >> test.py::test[pg-tpcds-q37-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-18300-flatmap-over-extend-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] >> test.py::test[join-lookupjoin_inner--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner--Results] >> test.py::test[type_v3-insert_struct_v3_wo_native--Results] [GOOD] >> test.py::test[udf-two_regexps--Results] >> test.py::test[blocks-type_and_callable_stats--Results] [GOOD] >> test.py::test[column_group-hint_append2--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_append2--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--ForceBlocks] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail5--Results] [SKIPPED] >> test.py::test[column_group-length-single-ForceBlocks] [SKIPPED] >> test.py::test[column_group-length-single-Results] [SKIPPED] >> test.py::test[in-in_compact_distinct-empty-ForceBlocks] [GOOD] >> test.py::test[in-in_compact_distinct-empty-Results] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] [GOOD] >> test.py::test[order_by-singular-default.txt-Results] [SKIPPED] >> test.py::test[column_order-select_plain-default.txt-ForceBlocks] >> test.py::test[pg-select_unionall_self-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-ForceBlocks] >> test.py::test[order_by-sort_decimals--Results] >> test.py::test[join-inner_trivial-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_trivial-off-Results] [SKIPPED] >> 
test.py::test[join-join_without_correlation_names--ForceBlocks] >> test.py::test[schema-skip_complex_type--Results] [GOOD] >> test.py::test[select-append_to_value--ForceBlocks] |54.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/loading/stages.cpp >> test.py::test[table_range-concat_sorted_with_key_diff--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] >> test.py::test[select-shift_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-shift_columns-default.txt-Results] >> test.py::test[multicluster-basic-default.txt-Results] [SKIPPED] >> test.py::test[multicluster-partition_by_key_force--Results] [SKIPPED] >> test.py::test[optimizers-group_visit_lambdas--Results] |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/loading/stages.cpp |54.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |54.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |54.4%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a >> test.py::test[order_by-sort_simple--Results] [GOOD] >> test.py::test[pg-in_mixed--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_asc--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[table_range-range_over_regexp--ForceBlocks] [GOOD] >> test.py::test[table_range-range_over_regexp--Results] >> test.py::test[optimizers-remove_keep_sorted_setting--Results] [GOOD] >> test.py::test[optimizers-sorted_scalar_content--Results] |54.4%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp |54.4%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/read_metadata.cpp >> test.py::test[optimizers-sorted_scalar_content--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] |54.4%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |54.4%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |54.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a >> test.py::test[pg-tpcds-q33-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q49-default.txt-Results] >> test.py::test[pg-tpcds-q40-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] [GOOD] >> test.py::test[pg-select_columnref1-default.txt-Results] >> test.py::test[join-simple_columns_partial--ForceBlocks] [GOOD] >> test.py::test[join-simple_columns_partial--Results] >> test.py::test[type_v3-YQL-20213--Results] [GOOD] >> test.py::test[type_v3-float--Results] >> test.py::test[in-in_compact_distinct-empty-Results] [GOOD] >> test.py::test[insert-append_sorted--ForceBlocks] >> test.py::test[order_by-yql-19598--Results] [GOOD] >> test.py::test[pg-drop_table--Results] >> test.py::test[ql_filter-integer_members--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_members--Results] >> test.py::test[join-mergejoin_force_align2-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_force_align2-off-Results] [SKIPPED] >> test.py::test[join-nopushdown_filter_with_depends_on-off-ForceBlocks] >> 
test.py::test[select-shift_columns-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_sorted_with_key_diff--Results] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-ForceBlocks] >> test.py::test[select-table_content_with_tmp_folder--ForceBlocks] >> test.py::test[join-lookupjoin_inner--Results] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o--ForceBlocks] >> test.py::test[action-nested_subquery--Results] [GOOD] >> test.py::test[aggr_factory-avg_if-default.txt-Results] >> test.py::test[table_range-range_over_regexp--Results] [GOOD] >> test.py::test[blocks-combine_all_count--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_count--Results] >> test.py::test[join-star_join_inners-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] >> test.py::test[tpch-q6-default.txt-ForceBlocks] >> test.py::test[join-join_right_cbo--ForceBlocks] [GOOD] >> test.py::test[join-join_right_cbo--Results] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[expr-inline_call--ForceBlocks] >> test.py::test[key_filter-between_with_key_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-between_with_key_filter--Results] >> test.py::test[in-in_immediate_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt-Results] >> test.py::test[ql_filter-integer_many_left--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_many_left--Results] >> test.py::test[simple_columns-simple_columns_join_qualified-default.txt-Results] [GOOD] >> test.py::test[tpch-q10-default.txt-Results] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> test.py::test[schema-insert-row_spec-ForceBlocks] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-pg_to_strings--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] >> test.py::test[blocks-pg_to_strings--Results] >> test.py::test[window-empty/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-empty/aggregations--Results] >> test.py::test[udf-two_regexps--Results] [GOOD] >> test.py::test[view-system_udf--Results] >> test.py::test[produce-reduce_with_flat_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_input_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> test.py::test[ql_filter-integer_members--Results] >> test.py::test[blocks-combine_all_count--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--ForceBlocks] >> test.py::test[optimizers-yql-7324_duplicate_arg--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] >> test.py::test[pg-tpcds-q51-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] >> test.py::test[join-pullup_random-off-Results] >> test.py::test[join-inner_with_order--Results] >> test.py::test[join-pullup_random-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key--Results] >> test.py::test[key_filter-between_with_key_filter--Results] [GOOD] >> test.py::test[key_filter-contains_optional--ForceBlocks] >> test.py::test[optimizers-test_no_aggregate_split--ForceBlocks] [GOOD] >> test.py::test[optimizers-test_no_aggregate_split--Results] >> test.py::test[pg-tpcds-q50-default.txt-Results] [GOOD] >> 
test.py::test[pg-tpcds-q69-default.txt-Results] >> test.py::test[ql_filter-integer_many_left--Results] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--ForceBlocks] >> test.py::test[select-append_to_value--ForceBlocks] [GOOD] >> test.py::test[select-append_to_value--Results] >> test.py::test[join-join_right_cbo--Results] [GOOD] >> test.py::test[join-left_join_null_column--ForceBlocks] >> test.py::test[optimizers-unused_columns_window_no_payloads--Results] [GOOD] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] >> test.py::test[aggregate-list_with_fold_map--Results] >> test.py::test[pg-tpcds-q10-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-Results] >> test.py::test[pg-in_mixed--ForceBlocks] [GOOD] >> test.py::test[pg-in_mixed--Results] >> test.py::test[pg-select_columnref1-default.txt-Results] [GOOD] >> test.py::test[pg-select_common_type_unionall--Results] >> test.py::test[join-join_without_correlation_names--ForceBlocks] [GOOD] >> test.py::test[join-join_without_correlation_names--Results] >> test.py::test[optimizers-yql_5830_fuse_outer_with_extra_deps--Results] [GOOD] >> test.py::test[order_by-changed_sort_with_limit--Results] [SKIPPED] >> test.py::test[order_by-literal_complex--Results] [SKIPPED] >> test.py::test[order_by-sort--Results] >> test.py::test[pg-drop_table--Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[blocks-pg_to_strings--Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--ForceBlocks] |54.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp |54.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/ro_controller.cpp >> test.py::test[type_v3-float--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] >> test.py::test[window-current/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-current/aggregations_leadlag--Results] >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter--Results] >> test.py::test[order_by-sort_decimals--Results] [GOOD] >> test.py::test[order_by-sort_with_take--Results] >> test.py::test[optimizers-yql-7324_duplicate_arg--Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-ForceBlocks] >> test.py::test[optimizers-group_visit_lambdas--Results] [GOOD] >> test.py::test[optimizers-pushdown_nonsep_over_aggregate--Results] [SKIPPED] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> test.py::test[join-join_without_column-off-ForceBlocks] >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[insert-merge_publish--ForceBlocks] >> test.py::test[join-nopushdown_filter_with_depends_on-off-ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_with_depends_on-off-Results] [SKIPPED] >> test.py::test[join-premap_map_semi-off-ForceBlocks] >> test.py::test[join-simple_columns_partial--Results] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--ForceBlocks] >> test.py::test[pg-tpcds-q49-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q77-default.txt-Results] >> test.py::test[column_order-select_plain-default.txt-ForceBlocks] [GOOD] >> test.py::test[column_order-select_plain-default.txt-Results] >> 
test.py::test[select-append_to_value--Results] [GOOD] >> test.py::test[select-boolean_where--ForceBlocks] >> test.py::test[join-lookupjoin_semi_1o2o--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_1o2o--Results] >> test.py::test[pg-in_mixed--Results] [GOOD] >> test.py::test[pg-select_yql_type--ForceBlocks] >> test.py::test[pg-tpcds-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-ForceBlocks] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] >> test.py::test[window-empty/aggregations--Results] [GOOD] >> test.py::test[window-full/aggregations_leadlag--ForceBlocks] >> test.py::test[expr-inline_call--ForceBlocks] [GOOD] >> test.py::test[expr-inline_call--Results] >> test.py::test[aggregate-group_by_session_distinct--Results] [GOOD] >> test.py::test[aggregate-group_by_session_only--Results] >> test.py::test[aggr_factory-variance-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] >> test.py::test[select-table_content_with_tmp_folder--ForceBlocks] [GOOD] >> test.py::test[select-table_content_with_tmp_folder--Results] |54.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp |54.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_gc_indexed.cpp >> test.py::test[aggregate-too_wide-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-too_wide-default.txt-Results] >> test.py::test[tpch-q6-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test.py::test[optimizers-test_no_aggregate_split--Results] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--ForceBlocks] [GOOD] >> test.py::test[column_order-union_all_positional_columns_count_fail--Results] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--ForceBlocks] >> test.py::test[ql_filter-integer_members--Results] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] >> test.py::test[join-yql-14829_leftonly--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_leftonly--Results] >> test.py::test[view-system_udf--Results] [GOOD] >> test.py::test[weak_field-weak_field_aggregation--Results] >> test.py::test[column_group-hint_anon-disable-Results] [SKIPPED] >> test.py::test[column_group-hint_append_fail-diff_grp-Results] [SKIPPED] >> test.py::test[column_order-select_action-default.txt-Results] >> test.py::test[join-join_without_correlation_names--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt--ForceBlocks] |54.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[optimizers-test_no_aggregate_split--Results] [GOOD] >> test.py::test[schema-insert-row_spec-ForceBlocks] [GOOD] >> test.py::test[expr-inline_call--Results] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-ForceBlocks] >> test.py::test[schema-insert-row_spec-Results] >> test.py::test[join-lookupjoin_semi_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache--ForceBlocks] >> test.py::test[pg-tpcds-q69-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-Results] >> test.py::test[window-generic/aggregations_include_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_include_current--Results] >> test.py::test[aggr_factory-avg_if-default.txt-Results] [GOOD] >> 
test.py::test[aggr_factory-multi--Results] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] >> test.py::test[union_all-inner_union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--ForceBlocks] >> test.py::test[column_order-select_plain-default.txt-Results] [GOOD] >> test.py::test[count-count_all-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_sum_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--Results] >> test.py::test[select-table_content_with_tmp_folder--Results] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_single_disable_prune--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_single_disable_prune--Results] >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt--ForceBlocks] >> test.py::test[order_by-sort--Results] [GOOD] >> test.py::test[pg-in_mixed--Results] >> test.py::test[window-current/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-current/session_aliases--ForceBlocks] >> test.py::test[order_by-sort_with_take--Results] [GOOD] >> test.py::test[order_by-sort_with_take_limit--Results] >> test.py::test[key_filter-contains_optional--ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_optional--Results] >> test.py::test[type_v3-ignore_v3_hint-opt-Results] [GOOD] >> test.py::test[type_v3-ignore_v3_pragma--Results] >> test.py::test[pg-tpcds-q77-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q98-default.txt-Results] >> test.py::test[join-left_join_null_column--ForceBlocks] [GOOD] >> test.py::test[join-left_join_null_column--Results] >> test.py::test[schema-insert-row_spec-Results] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-ForceBlocks] >> test.py::test[optimizers-yql-18408_filter_multiusage_pushdown-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--ForceBlocks] >> test.py::test[join-inner_with_order--Results] [GOOD] >> test.py::test[join-inner_with_order-off-Results] [SKIPPED] >> test.py::test[join-join_and_distinct_key-off-Results] [SKIPPED] >> test.py::test[join-join_without_correlation_names--Results] >> test.py::test[insert-append_sorted--ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted--Results] >> test.py::test[pg-tpcds-q59-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-Results] >> test.py::test[blocks-combine_all_min_filter--Results] [GOOD] >> test.py::test[blocks-combine_all_min_filter_opt--Results] >> test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[sampling-join_left_sample-default.txt-ForceBlocks] >> test.py::test[tpch-q10-default.txt-Results] [GOOD] >> test.py::test[tpch-q11-default.txt-Results] >> test.py::test[aggregate-list_with_fold_map--Results] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] >> test.py::test[blocks-combine_all_sum_filter--Results] [GOOD] >> test.py::test[blocks-compare--ForceBlocks] >> test.py::test[pg-tpcds-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test.py::test[optimizers-yql-10070_extract_members_over_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] >> test.py::test[optimizers-yql-11171_unordered_over_sorted_fill--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] >> 
test.py::test[join-premap_map_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_map_semi-off-Results] [SKIPPED] >> test.py::test[join-premap_merge_extrasort2-off-ForceBlocks] >> test.py::test[join-yql-14829_leftonly--Results] [GOOD] >> test.py::test[join-yql-8125--ForceBlocks] >> test.py::test[select-boolean_where--ForceBlocks] [GOOD] >> test.py::test[select-boolean_where--Results] >> test.py::test[join-join_without_column-off-ForceBlocks] [GOOD] >> test.py::test[join-join_without_column-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] >> test.py::test[key_filter-contains_optional--Results] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-ForceBlocks] >> test.py::test[ql_filter-integer_single_disable_prune--Results] [GOOD] >> test.py::test[result_types-data-default.txt-Results] >> test.py::test[pg-select_yql_type--ForceBlocks] [GOOD] >> test.py::test[pg-select_yql_type--Results] >> test.py::test[pg-tpcds-q29-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[join-yql-10654_pullup_with_sys_columns--ForceBlocks] [GOOD] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[join-star_join_inners--Results] |54.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp |54.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/storage.cpp >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-ForceBlocks] >> test.py::test[insert-append_sorted--Results] [GOOD] >> test.py::test[insert-keepmeta_nonstrict_fail--ForceBlocks] >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q13-default.txt-Results] >> test.py::test[aggregate-group_by_session_only--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[window-generic/aggregations_include_current--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-ForceBlocks] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-2171_aggregate_desc_sort_and_extract--Results] [SKIPPED] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] >> test.py::test[aggregate-group_by_column_alias_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] >> test.py::test[column_order-select_action-default.txt-Results] [GOOD] >> test.py::test[count-count_all-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_no_opt--ForceBlocks] [GOOD] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] >> test.py::test[join-left_join_null_column--Results] [GOOD] >> test.py::test[join-left_join_right_pushdown_simple--ForceBlocks] >> test.py::test[weak_field-weak_field_aggregation--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] >> test.py::test[count-count_distinct_from_view_concat--ForceBlocks] [GOOD] >> test.py::test[count-count_distinct_from_view_concat--Results] >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-ForceBlocks] >> test.py::test[expr-tagged_runtime-default.txt-ForceBlocks] [GOOD] >> test.py::test[expr-tagged_runtime-default.txt-Results] >> test.py::test[insert-merge_publish--ForceBlocks] [GOOD] >> 
test.py::test[insert-merge_publish--Results] >> test.py::test[pg-in_mixed--Results] [GOOD] >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] |54.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part3/pytest >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[select-boolean_where--Results] [GOOD] >> test.py::test[select-create_tuples-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q98-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] >> test.py::test[pg-select_yql_type--Results] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_yson--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] >> test.py::test[type_v3-decimal_yt--ForceBlocks] [GOOD] >> test.py::test[type_v3-decimal_yt--Results] >> test.py::test[count-count_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[count-count_all-default.txt-Results] >> test.py::test[join-yql-10654_pullup_with_sys_columns--Results] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[order_by-sort_with_take_limit--Results] [GOOD] >> test.py::test[pg-all_data--Results] >> test.py::test[join-yql-14847-off-ForceBlocks] >> test.py::test[type_v3-ignore_v3_pragma--Results] [GOOD] >> test.py::test[udf-python_script--Results] >> test.py::test[insert-keepmeta_nonstrict_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_nonstrict_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--ForceBlocks] >> test.py::test[window-full/aggregations_leadlag--ForceBlocks] [GOOD] >> test.py::test[window-full/aggregations_leadlag--Results] >> test.py::test[join-lookupjoin_with_cache--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_with_cache--Results] >> test.py::test[schema-select_all-row_spec_extra_sort-ForceBlocks] [GOOD] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] >> test.py::test[count-count_distinct_from_view_concat--Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--ForceBlocks] >> test.py::test[count-count_all-default.txt-Results] [GOOD] >> test.py::test[count-count_by_nulls--ForceBlocks] >> test.py::test[expr-tagged_runtime-default.txt-Results] [GOOD] >> test.py::test[hor_join-yield_off--ForceBlocks] [SKIPPED] >> test.py::test[insert-merge_publish--Results] [GOOD] >> test.py::test[insert-two_input_tables--ForceBlocks] >> test.py::test[blocks-combine_all_min_filter_opt--Results] [GOOD] >> test.py::test[blocks-combine_hashed_max--Results] >> test.py::test[type_v3-decimal_yt--Results] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-ForceBlocks] >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q13-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q52-default.txt-Results] >> test.py::test[join-inner_grouped_by_expr-off-ForceBlocks] >> test.py::test[blocks-compare--ForceBlocks] [GOOD] >> test.py::test[blocks-compare--Results] >> test.py::test[optimizers-yql-6008_limit_after_map--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] >> test.py::test[sampling-join_left_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-join_left_sample-default.txt-Results] >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] >> 
test.py::test[pg-tpcds-q05-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_no_opt--Results] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-ForceBlocks] >> test.py::test[pg-tpcds-q91-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q91-default.txt-Results] >> test.py::test[schema-select_all-row_spec_extra_sort-Results] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-ForceBlocks] >> test.py::test[insert-keepmeta_proto_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_proto_fail--Results] [GOOD] >> test.py::test[insert-keepmeta_view_fail--ForceBlocks] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-one-ForceBlocks] >> test.py::test[join-premap_merge_extrasort2-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_merge_extrasort2-off-Results] [SKIPPED] >> test.py::test[join-pullup_context_dep-off-ForceBlocks] |54.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[hor_join-yield_off--ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_semi_subq--Results] >> test.py::test[count-count_all-default.txt-Results] [GOOD] >> test.py::test[count-count_const_no_grouping-default.txt-Results] >> test.py::test[join-lookupjoin_with_cache--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence--ForceBlocks] >> test.py::test[result_types-data-default.txt-Results] [GOOD] >> test.py::test[sampling-direct_read--Results] |54.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp >> test.py::test[pg-select_from_columns_qstar-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] |54.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/transaction/tx_blobs_written.cpp >> test.py::test[join-join_without_correlation_names--Results] [GOOD] >> test.py::test[join-left_join_null_column--Results] >> test.py::test[window-current/session_aliases--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-6008_limit_after_map--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_yson--Results] [GOOD] |54.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a >> test.py::test[window-current/session_aliases--Results] |54.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a >> test.py::test[weak_field-weak_field_rest--Results] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--ForceBlocks] >> test.py::test[pg-tpcds-q61-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[optimizers-yql-6038_direct_row--Results] >> test.py::test[pg-tpcds-q91-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q61-default.txt-Results] |54.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a >> test.py::test[blocks-compare--Results] [GOOD] >> test.py::test[blocks-distinct_pure_keys--ForceBlocks] >> test.py::test[join-left_join_right_pushdown_simple--ForceBlocks] 
[GOOD] >> test.py::test[join-left_join_right_pushdown_simple--Results] >> test.py::test[pg-tpch-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q12-default.txt-Results] >> test.py::test[pg-tpcds-q02-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q02-default.txt-Results] >> test.py::test[select-create_tuples-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-create_tuples-default.txt-Results] >> test.py::test[insert-keepmeta_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-keepmeta_view_fail--Results] [GOOD] >> test.py::test[insert-override_view_fail--ForceBlocks] |54.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp |54.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/tier/gc.cpp >> test.py::test[aggregate-too_wide-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-default-ForceBlocks] [SKIPPED] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] >> test.py::test[blocks-block_input-aux_columns-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input-aux_columns-Results] [SKIPPED] >> test.py::test[blocks-boolean_ops--ForceBlocks] >> test.py::test[join-yql-8125--ForceBlocks] [GOOD] >> test.py::test[join-yql-8125--Results] >> test.py::test[udf-python_script--Results] [GOOD] >> test.py::test[udf-udaf--Results] >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-ForceBlocks] >> test.py::test[join-lookupjoin_semi_subq--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] >> test.py::test[aggregate-group_by_gs_alt_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[aggregate-native_desc_group_compact_by--Results] >> test.py::test[pg-tpcds-q52-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q56-default.txt-Results] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-ForceBlocks] >> test.py::test[join-yql-14847-off-ForceBlocks] [GOOD] >> test.py::test[join-yql-14847-off-Results] [SKIPPED] >> test.py::test[join-yql-8131-off-ForceBlocks] [SKIPPED] >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> test.py::test[join-yql-8980--ForceBlocks] >> test.py::test[key_filter-contains_tuples_no_keyfilter-default.txt-Results] [GOOD] >> test.py::test[key_filter-is_null_multi_key--ForceBlocks] >> test.py::test[pg-tpcds-q02-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-ForceBlocks] >> test.py::test[count-count_const_no_grouping-default.txt-Results] [GOOD] >> test.py::test[count-count_nullable--Results] >> test.py::test[pg-tpcds-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q29-default.txt-Results] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-ForceBlocks] [GOOD] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] >> test.py::test[count-count_by_nulls--ForceBlocks] [GOOD] >> test.py::test[count-count_by_nulls--Results] |54.5%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |54.5%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a >> 
test.py::test[tpch-q11-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_flags--Results] >> test.py::test[select-create_tuples-default.txt-Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-ForceBlocks] |54.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a >> test.py::test[join-star_join_inners--Results] [GOOD] >> test.py::test[join-star_join_mirror-off-Results] [SKIPPED] >> test.py::test[join-yql-8131--Results] >> test.py::test[insert-override_view_fail--ForceBlocks] [GOOD] >> test.py::test[insert-override_view_fail--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup--ForceBlocks] >> test.py::test[join-yql-8131--Results] [SKIPPED] >> test.py::test[key_filter-contains_tuples-default.txt-Results] >> test.py::test[join-left_join_right_pushdown_simple--Results] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--ForceBlocks] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> test.py::test[join-inner_grouped_by_expr-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap--ForceBlocks] >> test.py::test[insert-two_input_tables--ForceBlocks] [GOOD] >> test.py::test[insert-two_input_tables--Results] >> test.py::test[join-inner_grouped_by_expr-off-Results] [SKIPPED] >> test.py::test[join-left_only_with_other--ForceBlocks] >> test.py::test[window-current/session_aliases--Results] [GOOD] >> test.py::test[window-generic/aggregations_after_current--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_desc--ForceBlocks] [GOOD] >> test.py::test[datetime-date_tz_table_sort_desc--Results] >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[pg-pg_column_case--Results] >> test.py::test[sampling-direct_read--Results] [GOOD] >> test.py::test[sampling-join_right_sample-default.txt-Results] >> test.py::test[blocks-combine_hashed_max--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] >> test.py::test[schema-user_schema_directread-default.txt-ForceBlocks] [GOOD] >> test.py::test[schema-user_schema_directread-default.txt-Results] >> test.py::test[select-trivial_where-one-ForceBlocks] [GOOD] >> test.py::test[select-trivial_where-one-Results] >> test.py::test[join-pullup_context_dep-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_context_dep-off-Results] [SKIPPED] >> test.py::test[join-split_to_list_as_key--ForceBlocks] >> test.py::test[type_v3-ignore_v3_hint-tag_opt-Results] [GOOD] >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] >> test.py::test[optimizers-yql-4240-aggregate_whole_struct-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-6133_skip_deps--Results] [SKIPPED] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] >> test.py::test[count-count_by_nulls--Results] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-ForceBlocks] >> test.py::test[bigdate-table_int_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_key_filter-default-Results] [SKIPPED] >> test.py::test[bigdate-tz_table_yt_key_filter--Results] [SKIPPED] >> test.py::test[blocks-add_int64--Results] >> test.py::test[insert-two_input_tables--Results] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-ForceBlocks] >> test.py::test[weak_field-weak_field_rest--Results] [GOOD] >> test.py::test[weak_field-weak_field_to_yson--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--ForceBlocks] [GOOD] >> 
test.py::test[join-mapjoin_early_rewrite_sequence--Results] >> test.py::test[join-lookupjoin_with_cache-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_with_cache-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_dup_key--ForceBlocks] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-ForceBlocks] >> test.py::test[join-yql-8125--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] >> test.py::test[window-full/aggregations_leadlag--Results] [GOOD] >> test.py::test[window-generic/session--ForceBlocks] >> test.py::test[datetime-date_tz_table_sort_desc--Results] [GOOD] >> test.py::test[dq-truncate_local-default.txt-ForceBlocks] |54.5%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |54.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/hooks/testing/controller.cpp |54.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |54.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q15-default.txt-Results] >> test.py::test[pg-tpcds-q56-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q61-default.txt-Results] >> test.py::test[schema-user_schema_directread-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--ForceBlocks] >> test.py::test[select-trivial_where-one-Results] [GOOD] >> test.py::test[select-unlabeled--ForceBlocks] >> test.py::test[blocks-boolean_ops--ForceBlocks] [GOOD] >> test.py::test[blocks-boolean_ops--Results] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] >> test.py::test[type_v3-append_diff_flags--Results] [GOOD] >> test.py::test[type_v3-singulars--Results] [SKIPPED] >> test.py::test[udf-named_args_for_script--Results] >> test.py::test[blocks-distinct_pure_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_pure_keys--Results] >> test.py::test[aggregate-group_by_rollup_grouping--ForceBlocks] >> test.py::test[join-mergejoin_big_primary-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Results] |54.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align1--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] |54.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/constructor.cpp |54.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |54.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a >> test.py::test[pg-tpcds-q29-default.txt-Results] [GOOD] >> 
test.py::test[pg-tpcds-q58-default.txt-Results] >> test.py::test[pg-tpcds-q06-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q06-default.txt-Results] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] >> test.py::test[window-distinct_over_window_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] >> test.py::test[join-left_join_null_column--Results] [GOOD] >> test.py::test[join-left_null_literal--Results] >> test.py::test[key_filter-is_null_multi_key--ForceBlocks] [GOOD] >> test.py::test[key_filter-is_null_multi_key--Results] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q22-default.txt-Results] >> test.py::test[udf-udaf--Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] >> test.py::test[select-substring_v1-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] >> test.py::test[join-yql-8980--ForceBlocks] [GOOD] >> test.py::test[join-yql-8980--Results] >> test.py::test[pg-tpch-q04-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-ForceBlocks] >> test.py::test[pg-pg_column_case--Results] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] >> test.py::test[blocks-boolean_ops--Results] [GOOD] >> test.py::test[blocks-combine_all_some--ForceBlocks] >> test.py::test[pg-tpch-q12-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q14-default.txt-Results] >> test.py::test[count-count_nullable--Results] [GOOD] >> test.py::test[dq-read_cost_native-default.txt-Results] [SKIPPED] >> test.py::test[dq-wrong_script_segf--Results] [SKIPPED] >> test.py::test[epochs-reset_sortness_on_append--Results] >> test.py::test[pg-tpcds-q06-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-ForceBlocks] >> test.py::test[key_filter-is_null_multi_key--Results] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--ForceBlocks] >> test.py::test[blocks-add_int64--Results] [GOOD] >> test.py::test[blocks-add_uint64--Results] >> test.py::test[weak_field-weak_field_to_yson--Results] [GOOD] >> test.py::test[window-full/session--Results] >> test.py::test[join-left_only_with_other--ForceBlocks] [GOOD] >> test.py::test[join-left_only_with_other--Results] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] [GOOD] >> test.py::test[order_by-literal_complex--ForceBlocks] >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[select-substring_v1-default.txt-Results] [GOOD] >> test.py::test[select-trivial_having-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_gs_subselect-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] >> test.py::test[distinct-distinct_columns-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_columns-default.txt-Results] >> test.py::test[join-lookupjoin_bug7646_subst--ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_bug7646_subst--Results] >> test.py::test[pg-tpcds-q61-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q75-default.txt-Results] >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key--ForceBlocks] [GOOD] >> 
test.py::test[join-mapjoin_dup_key--Results] >> test.py::test[sampling-orderedjoin_right_sample-default.txt-Results] [GOOD] >> test.py::test[schema-limit_directread--ForceBlocks] >> test.py::test[join-split_to_list_as_key--ForceBlocks] [GOOD] >> test.py::test[join-split_to_list_as_key--Results] >> test.py::test[join-star_join_inners_premap--ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap--Results] >> test.py::test[blocks-distinct_pure_keys--Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--ForceBlocks] |54.6%| [CC] {default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |54.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[optimizers-yql-6038_direct_row--Results] [GOOD] >> test.py::test[key_filter-contains_tuples-default.txt-Results] [GOOD] >> test.py::test[key_filter-empty_range--Results] >> test.py::test[sampling-join_right_sample-default.txt-Results] [GOOD] >> test.py::test[sampling-map-dynamic-Results] >> test.py::test[pg-tpch-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q09-default.txt-Results] |54.6%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/blobs_action/storages_manager/manager.cpp |54.6%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |54.6%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |54.6%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] >> test.py::test[window-distinct_over_window_struct-default.txt-Results] [GOOD] >> test.py::test[window-win_extract_members-default.txt-ForceBlocks] >> test.py::test[select-unlabeled--ForceBlocks] [GOOD] >> test.py::test[select-unlabeled--Results] >> test.py::test[udf-named_args_for_script--Results] [GOOD] >> test.py::test[udf-regexp_udf--Results] >> test.py::test[dq-truncate_local-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-truncate_local-default.txt-Results] [SKIPPED] >> test.py::test[expr-non_persistable_group_by_some_fail--ForceBlocks] >> test.py::test[insert_monotonic-several2-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-several2-default.txt-Results] |54.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[udf-udf_call_with_group_and_limit--ForceBlocks] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] [GOOD] >> test.py::test[view-file_inner_udf--Results] >> test.py::test[view-file_inner_udf--Results] [SKIPPED] >> test.py::test[weak_field-few_source_different_columns--Results] >> test.py::test[join-anyjoin_common_dup--ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_dup--Results] >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q58-default.txt-Results] [GOOD] >> test.py::test[join-left_null_literal--Results] [GOOD] >> test.py::test[join-left_semi_with_other-off-Results] [SKIPPED] >> test.py::test[join-left_trivial-off-Results] >> test.py::test[pg-tpcds-q67-default.txt-Results] >> test.py::test[select-hits_count--ForceBlocks] [GOOD] >> 
test.py::test[select-hits_count--Results] >> test.py::test[join-left_trivial-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi--Results] >> test.py::test[distinct-distinct_columns-default.txt-Results] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[optimizers-yql-8953_logical_fuse_with_table_props--Results] [GOOD] >> test.py::test[order_by-SortByOneFieldDesc--Results] >> test.py::test[join-yql-8980--Results] [GOOD] >> test.py::test[json-jsondocument/insert--ForceBlocks] >> test.py::test[join-lookupjoin_bug7646_subst--Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-ForceBlocks] >> test.py::test[select-unlabeled--Results] [GOOD] >> test.py::test[select_yql-from_table_with_explicit_cluster-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[key_filter-pushdown_keyextract_passthrough-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q04-default.txt-Results] [GOOD] >> test.py::test[pragma-file-default.txt-ForceBlocks] >> test.py::test[join-left_only_with_other--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--ForceBlocks] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_reverse_key_order-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_reverse_key_order-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-ForceBlocks] >> test.py::test[expr-non_persistable_group_by_some_fail--ForceBlocks] [GOOD] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] >> test.py::test[insert_monotonic-several2-default.txt-Results] [GOOD] >> test.py::test[expr-non_persistable_group_by_some_fail--Results] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--ForceBlocks] >> test.py::test[join-compact_join--ForceBlocks] |54.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[pg-tpcds-q22-default.txt-Results] [GOOD] >> test.py::test[join-split_to_list_as_key--Results] [GOOD] >> test.py::test[join-star_join-off-ForceBlocks] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] >> test.py::test[blocks-add_uint64--Results] [GOOD] >> test.py::test[blocks-boolean_ops_scalar--Results] >> test.py::test[window-generic/aggregations_after_current--ForceBlocks] [GOOD] >> test.py::test[window-generic/aggregations_after_current--Results] >> test.py::test[epochs-reset_sortness_on_append--Results] [GOOD] >> test.py::test[file-file_list_simple--Results] >> test.py::test[aggregate-native_desc_group_compact_by--Results] [GOOD] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] |54.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-mapjoin_dup_key--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_sequence-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort--ForceBlocks] >> test.py::test[pg-tpcds-q11-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q11-default.txt-Results] >> test.py::test[pg-tpch-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q18-default.txt-Results] |54.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> 
test.py::test[select_yql-from_table_with_explicit_cluster-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> test.py::test[join-star_join_inners_premap--Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-ForceBlocks] >> test.py::test[key_filter-part_key_over_dynamic--ForceBlocks] [GOOD] >> test.py::test[key_filter-part_key_over_dynamic--Results] >> test.py::test[pg-tpch-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q17-default.txt-Results] >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_some--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_some--Results] >> test.py::test[order_by-literal_complex--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_complex--Results] >> test.py::test[aggregate-group_by_rollup_grouping--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_grouping--Results] >> test.py::test[pg-tpcds-q75-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] |54.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/control/ut/ydb-core-control-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |54.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> test.py::test[schema-limit_directread--ForceBlocks] [GOOD] >> test.py::test[schema-limit_directread--Results] >> test.py::test[select-trivial_having-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_having-default.txt-Results] >> test.py::test[window-generic/session--ForceBlocks] [GOOD] >> test.py::test[window-generic/session--Results] |54.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |54.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |54.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |54.7%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> test.py::test[udf-regexp_udf--Results] [GOOD] >> test.py::test[union_all-mix_map_and_project--Results] >> test.py::test[pg-tpcds-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-ForceBlocks] >> test.py::test[weak_field-few_source_different_columns--Results] [GOOD] >> test.py::test[weak_field-weak_field_infer_scheme--Results] >> test.py::test[pg-tpch-q09-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all-default.txt-Results] >> test.py::test[sampling-map-dynamic-Results] [GOOD] >> test.py::test[schema-fake_column-default.txt-Results] >> test.py::test[blocks-group_by_complex_key--ForceBlocks] [GOOD] >> test.py::test[blocks-group_by_complex_key--Results] >> 
test.py::test[order_by-literal_complex--Results] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--ForceBlocks] >> test.py::test[produce-reduce_all-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_list_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_subfields--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> test.py::test[key_filter-part_key_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--ForceBlocks] |54.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |54.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |54.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--ForceBlocks] >> test.py::test[schema-limit_directread--Results] [GOOD] >> test.py::test[schema-remap_desc--ForceBlocks] >> test.py::test[blocks-combine_all_some--Results] [GOOD] >> test.py::test[blocks-combine_hashed_avg--ForceBlocks] >> test.py::test[pragma-file-default.txt-ForceBlocks] [GOOD] >> test.py::test[pragma-file-default.txt-Results] >> test.py::test[order_by-SortByOneFieldDesc--Results] [GOOD] >> test.py::test[order_by-assume_over_input_desc--Results] [SKIPPED] >> test.py::test[order_by-limit--Results] >> test.py::test[join-lookupjoin_semi--Results] [GOOD] >> test.py::test[join-mapjoin_with_anonymous--Results] |54.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[produce-reduce_subfields--Results] [SKIPPED] >> test.py::test[select-trivial_having-default.txt-Results] [GOOD] >> test.py::test[select-trivial_where-many-ForceBlocks] >> test.py::test[blocks-boolean_ops_scalar--Results] [GOOD] >> test.py::test[blocks-combine_all_max--Results] >> test.py::test[pg-tpcds-q67-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Results] >> test.py::test[window-generic/aggregations_after_current--Results] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-ForceBlocks] >> test.py::test[json-jsondocument/insert--ForceBlocks] [GOOD] >> test.py::test[json-jsondocument/insert--Results] >> test.py::test[join-lookupjoin_inner_1o2o-off-ForceBlocks] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite--ForceBlocks] >> test.py::test[join-nested_semi_join-off-ForceBlocks] [GOOD] >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> test.py::test[join-order_of_qualified--ForceBlocks] >> test.py::test[insert-select_after_insert_relabeled-default.txt-ForceBlocks] >> test.py::test[file-parse_file_in_select_as_str--ForceBlocks] [GOOD] >> test.py::test[file-parse_file_in_select_as_str--Results] >> test.py::test[join-mapjoin_early_rewrite_star--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] >> test.py::test[aggr_factory-multi--Results] [GOOD] >> test.py::test[blocks-date_less_or_equal--Results] [GOOD] >> test.py::test[blocks-decimal_avg--Results] >> test.py::test[aggr_factory-top-default.txt-Results] >> test.py::test[file-file_list_simple--Results] [GOOD] >> 
test.py::test[file-where_key_in_get_file_content--Results] >> test.py::test[pragma-file-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_field_subset--ForceBlocks] >> test.py::test[join-compact_join--ForceBlocks] [GOOD] >> test.py::test[join-compact_join--Results] >> test.py::test[window-generic/session--Results] [GOOD] >> test.py::test[window-lagging/aggregations--ForceBlocks] >> test.py::test[window-win_extract_members-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_extract_members-default.txt-Results] >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> test.py::test[join-anyjoin_common_dup--Results] [GOOD] >> test.py::test[join-anyjoin_common_dup-off-ForceBlocks] >> test.py::test[select-trivial_order_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-trivial_order_by-default.txt-Results] >> test.py::test[json-jsondocument/insert--Results] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-ForceBlocks] >> test.py::test[select-from_in_front_sub-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_rollup_grouping--Results] [GOOD] >> test.py::test[aggregate-having_distinct_expr--ForceBlocks] >> test.py::test[distinct-distinct_window-default.txt-ForceBlocks] [GOOD] >> test.py::test[distinct-distinct_window-default.txt-Results] |54.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[blocks-group_by_complex_key--Results] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-string_with_ff-default.txt-Results] >> test.py::test[aggregate-table_row_aggregation-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q15-default.txt-Results] >> test.py::test[aggregate-too_wide-default.txt-Results] >> test.py::test[join-star_join-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join-off-Results] [SKIPPED] >> test.py::test[join-yql-14847--ForceBlocks] >> test.py::test[aggregate-group_by_mul_gs_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] >> test.py::test[weak_field-weak_field_infer_scheme--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite_star--Results] [GOOD] >> test.py::test[join-mapjoin_with_empty_struct--ForceBlocks] >> test.py::test[union_all-mix_map_and_project--Results] [GOOD] >> test.py::test[union_all-union_all_multiple-default.txt-Results] >> test.py::test[join-star_join_inners_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[join-star_join_inners_premap-off-Results] [SKIPPED] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-ForceBlocks] >> test.py::test[select-trivial_order_by-default.txt-Results] [GOOD] >> test.py::test[select_yql-from_select_table-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[select_yql-from_select_table-default.txt-Results] [SKIPPED] >> test.py::test[select_yql-from_table_without_cluster--ForceBlocks] [SKIPPED] >> test.py::test[select_yql-from_table_without_cluster--Results] [SKIPPED] >> test.py::test[select_yql-order_by_limit_offset-default.txt-ForceBlocks] [SKIPPED] >> 
test.py::test[select_yql-order_by_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[type_v3-mergejoin_with_sort--ForceBlocks] |54.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[file-parse_file_in_select_as_str--Results] [GOOD] >> test.py::test[schema-remap_desc--ForceBlocks] [GOOD] >> test.py::test[schema-remap_desc--Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] >> test.py::test[pg-tpcds-q15-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q73-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q95-default.txt-Results] >> test.py::test[window-win_extract_members-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-ForceBlocks] >> test.py::test[join-compact_join--Results] [GOOD] >> test.py::test[join-inner_grouped--ForceBlocks] >> test.py::test[key_filter-string_with_ff-default.txt-Results] [GOOD] >> test.py::test[library-package--ForceBlocks] [SKIPPED] >> test.py::test[library-package--Results] [SKIPPED] |54.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[schema-fake_column-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--Results] >> test.py::test[key_filter-yql-8117-table_key_filter--ForceBlocks] [GOOD] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] >> test.py::test[select-trivial_where-many-ForceBlocks] [GOOD] >> test.py::test[select-trivial_where-many-Results] >> test.py::test[blocks-combine_hashed_avg--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_avg--Results] >> test.py::test[tpch-q22-default.txt-ForceBlocks] >> test.py::test[join-pushdown_filter_over_left--ForceBlocks] >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q93-default.txt-Results] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[library-package--Results] [SKIPPED] >> test.py::test[key_filter-empty_range--Results] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-lambda_with_null_filter--Results] >> test.py::test[join-mapjoin_dup_key-off-ForceBlocks] >> test.py::test[order_by-limit--Results] [GOOD] >> test.py::test[order_by-sort_simple--Results] >> test.py::test[window-full/session--Results] [GOOD] >> test.py::test[window-full/session_aliases--Results] >> test.py::test[file-where_key_in_get_file_content--Results] [GOOD] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] >> test.py::test[order_by-order_by_expr_over_sorted_table--Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--ForceBlocks] >> test.py::test[join-order_of_qualified--ForceBlocks] [GOOD] >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_early_rewrite--Results] >> test.py::test[join-mapjoin_with_anonymous--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary-off-Results] [SKIPPED] >> test.py::test[join-premap_common_cross--Results] >> test.py::test[select-trivial_where-many-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_max--Results] [GOOD] >> 
test.py::test[blocks-combine_all_minmax_double--Results] >> test.py::test[insert-select_after_insert_relabeled-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] >> test.py::test[distinct-distinct_window-default.txt-Results] [GOOD] >> test.py::test[expr-evaluate_parse_inf_nan--ForceBlocks] [SKIPPED] >> test.py::test[expr-evaluate_parse_inf_nan--Results] [SKIPPED] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-ForceBlocks] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-order_of_qualified--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_field_subset--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_all_field_subset--Results] >> test.py::test[key_filter-dependent_value-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-dependent_value-default.txt-Results] >> test.py::test[window-row_number_to_map-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map-default.txt-Results] >> test.py::test[key_filter-yql-8117-table_key_filter--Results] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-ForceBlocks] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[schema-remap_desc--Results] [GOOD] >> test.py::test[weak_field-weak_field_long_fields--Results] [GOOD] >> test.py::test[window-distinct_over_window--Results] >> test.py::test[join-mergejoin_saves_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort--Results] >> test.py::test[aggregate-having_distinct_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-having_distinct_expr--Results] >> test.py::test[sampling-bind_expr_subquery-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q95-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q06-default.txt-Results] >> test.py::test[select-from_in_front_sub-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] >> test.py::test[blocks-decimal_avg--Results] [GOOD] >> test.py::test[blocks-decimal_unary--Results] >> test.py::test[blocks-combine_hashed_avg--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_all--ForceBlocks] >> test.py::test[join-mapjoin_early_rewrite--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--ForceBlocks] >> test.py::test[join-mapjoin_with_empty_struct--ForceBlocks] [GOOD] >> test.py::test[insert-select_after_insert_relabeled-default.txt-Results] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-ForceBlocks] >> test.py::test[join-yql-14847--ForceBlocks] [GOOD] >> test.py::test[join-yql-14847--Results] >> test.py::test[type_v3-append_diff_layout1--Results] [GOOD] >> test.py::test[udf-udaf_distinct--ForceBlocks] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] >> test.py::test[join-mapjoin_with_empty_struct--Results] >> test.py::test[union_all-union_all_multiple-default.txt-Results] [GOOD] >> test.py::test[view-file_eval--Results] >> test.py::test[key_filter-dependent_value-default.txt-Results] [GOOD] >> test.py::test[key_filter-key_double_opt_suffix--ForceBlocks] [SKIPPED] >> test.py::test[key_filter-key_double_opt_suffix--Results] [SKIPPED] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> 
test.py::test[pg-tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q22-default.txt-Results] |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[type_v3-mergejoin_with_sort--ForceBlocks] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Results] |54.8%| [CC] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp |54.8%| [CC] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/columnshard_impl.h_serialized.cpp >> test.py::test[produce-reduce_all_field_subset--Results] [GOOD] >> test.py::test[produce-reduce_all_multi_in-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_all_multi_in-default.txt-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_ref--ForceBlocks] >> test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--ForceBlocks] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] >> test.py::test[join-anyjoin_common_dup-off-ForceBlocks] [GOOD] >> test.py::test[join-anyjoin_common_dup-off-Results] [SKIPPED] >> test.py::test[join-cbo_4tables_version1--ForceBlocks] [SKIPPED] >> test.py::test[join-cbo_4tables_version1--Results] [SKIPPED] >> test.py::test[join-inner_grouped-off-ForceBlocks] >> test.py::test[pg-tpch-q18-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_rows_and_filter--Results] [SKIPPED] >> test.py::test[produce-process_with_assume--Results] >> test.py::test[join-inner_grouped--ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped--Results] >> test.py::test[aggregate-group_by_hop_star--ForceBlocks] >> test.py::test[window-lagging/aggregations--ForceBlocks] [GOOD] >> test.py::test[window-lagging/aggregations--Results] >> test.py::test[aggregate-having_distinct_expr--Results] [GOOD] >> test.py::test[blocks-combine_all_decimal--ForceBlocks] >> test.py::test[flatten_by-flatten_by_aster_opt-default.txt-Results] [GOOD] |54.8%| [AR] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[join-pushdown_filter_over_left--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_dup_key-off-ForceBlocks] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_with_empty_struct--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-ForceBlocks] [GOOD] >> test.py::test[join-pushdown_filter_over_left--Results] >> test.py::test[join-mapjoin_dup_key-off-Results] [SKIPPED] >> test.py::test[limit-empty_sort_calc_after_limit-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--ForceBlocks] >> test.py::test[limit-limit--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] >> test.py::test[view-file_eval--Results] [GOOD] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[pg-tpcds-q93-default.txt-Results] [GOOD] >> test.py::test[view-secure_eval_dyn--Results] >> test.py::test[order_by-order_by_expr_simple--ForceBlocks] [GOOD] >> 
test.py::test[join-yql-14847--Results] [GOOD] >> test.py::test[order_by-order_by_expr_simple--Results] >> test.py::test[join-yql-8131--ForceBlocks] [SKIPPED] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] >> test.py::test[aggr_factory-top-default.txt-Results] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-ForceBlocks] [GOOD] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] >> test.py::test[aggregate-aggregate_udf_nested--Results] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[pg-tpcds-q24-default.txt-ForceBlocks] [GOOD] >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> test.py::test[type_v3-split--ForceBlocks] [SKIPPED] >> test.py::test[type_v3-split--Results] [SKIPPED] >> test.py::test[type_v3-uuid--ForceBlocks] >> test.py::test[tpch-q22-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q22-default.txt-Results] >> test.py::test[multicluster-extend-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unordered_over_sort--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] |54.8%| [AR] {RESULT} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a >> test.py::test[order_by-sort_simple--Results] [GOOD] >> test.py::test[params-complex_yson--Results] >> test.py::test[window-yql-15636-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test.py::test[aggregate-group_by_ru_join_star-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] >> test.py::test[pg-tpcds-q64-default.txt-Results] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] >> test.py::test[blocks-decimal_unary--Results] [GOOD] >> test.py::test[blocks-distinct_mixed_keys--Results] >> test.py::test[key_filter-lambda_with_null_filter--Results] [GOOD] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_qualified_all_enable-default.txt-Results] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--ForceBlocks] >> test.py::test[join-inner_grouped--Results] [GOOD] >> test.py::test[join-join_cbo_3_tables--ForceBlocks] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[join-yql-8131--ForceBlocks] [SKIPPED] |54.8%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a >> test.py::test[insert-trivial_literals_multirow-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] >> test.py::test[lambda-lambda_with_tie-default.txt-Results] [GOOD] >> test.py::test[lineage-select_field_filter-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-select_field_filter-default.txt-Results] [SKIPPED] >> test.py::test[order_by-order_by_expr_simple--Results] [GOOD] >> test.py::test[order_by-sort_with_take_limit--ForceBlocks] >> test.py::test[window-row_number_to_map-default.txt-Results] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-ForceBlocks] >> test.py::test[lineage-window_many-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-unessential_filter_over_prune_keys--ForceBlocks] [SKIPPED] >> 
test.py::test[optimizers-unessential_filter_over_prune_keys--Results] [SKIPPED] >> test.py::test[optimizers-yql-2582_limit_for_join_input--ForceBlocks] >> test.py::test[blocks-distinct_mixed_all--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_mixed_all--Results] >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-ForceBlocks] >> test.py::test[type_v3-append_struct-default.txt-Results] >> test.py::test[pg-tpch-q06-default.txt-Results] [GOOD] >> test.py::test[view-secure_eval_dyn--Results] [GOOD] >> test.py::test[view-view_with_lambda--Results] >> test.py::test[pg-tpch-q11-default.txt-Results] >> test.py::test[join-premap_common_cross--Results] [GOOD] >> test.py::test[join-premap_common_inner_both_sides-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner_filter--Results] >> test.py::test[produce-process_with_assume--Results] [GOOD] >> test.py::test[produce-process_with_lambda-default.txt-Results] >> test.py::test[join-pushdown_filter_over_left--Results] [GOOD] >> test.py::test[join-star_join_mirror--ForceBlocks] >> test.py::test[sampling-bind_expr_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] >> test.py::test[udf-udaf_distinct--ForceBlocks] [GOOD] >> test.py::test[udf-udaf_distinct--Results] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] [GOOD] >> test.py::test[window-win_lead_in_mem-default.txt-ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-ForceBlocks] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] >> test.py::test[order_by-literal_desc--Results] >> test.py::test[insert-trivial_literals_multirow-default.txt-Results] [GOOD] >> test.py::test[insert-yql-13083--ForceBlocks] >> test.py::test[order_by-literal_desc--Results] [SKIPPED] >> test.py::test[order_by-literal_with_assume_desc--Results] [SKIPPED] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] >> test.py::test[join-inner_grouped-off-ForceBlocks] [GOOD] >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test.py::test[select-one_unlabeled_column-default.txt-ForceBlocks] [GOOD] >> test.py::test[select-one_unlabeled_column-default.txt-Results] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] >> test.py::test[aggregate-group_by_hop_star--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop_star--Results] [SKIPPED] >> test.py::test[aggregate-group_by_rollup_aggr_expr--ForceBlocks] >> test.py::test[limit-limit--ForceBlocks] [GOOD] >> test.py::test[limit-limit--Results] >> test.py::test[params-complex_yson--Results] [GOOD] >> test.py::test[pg-insert--Results] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[join-inner_grouped-off-Results] [SKIPPED] >> test.py::test[tpch-q22-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_columns_by_aggregate-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_any_no_join_reduce-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] >> 
test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] >> test.py::test[blocks-distinct_mixed_all--Results] [GOOD] >> test.py::test[blocks-exists--ForceBlocks] >> test.py::test[sampling-bind_expr_subquery-default.txt-Results] [GOOD] >> test.py::test[sampling-sample-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_right--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--ForceBlocks] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[tpch-q3-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort_cross--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[key_filter-utf8_with_legacy--ForceBlocks] >> test.py::test[library-package_override--ForceBlocks] [SKIPPED] >> test.py::test[library-package_override--Results] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--ForceBlocks] [SKIPPED] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-window_asstruct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] >> test.py::test[produce-reduce_multi_in_ref--ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Results] >> test.py::test[select-one_unlabeled_column-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-ForceBlocks] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--ForceBlocks] [GOOD] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] >> test.py::test[type_v3-uuid--ForceBlocks] [GOOD] >> test.py::test[udf-udaf_distinct--Results] [GOOD] >> test.py::test[view-file_inner--ForceBlocks] >> test.py::test[blocks-combine_all_decimal--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_hashed_set--Results] [GOOD] >> test.py::test[limit-limit--Results] [GOOD] >> test.py::test[blocks-combine_hashed_some--Results] >> test.py::test[lineage-if_struct-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-if_struct-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-combinebykey_fields_subset_range--ForceBlocks] >> test.py::test[blocks-combine_all_decimal--Results] >> test.py::test[aggregate-aggregate_udf_nested--Results] [GOOD] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] >> test.py::test[window-distinct_over_window--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-ForceBlocks] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[type_v3-uuid--ForceBlocks] [GOOD] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> test.py::test[join-join_cbo_3_tables--ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] [GOOD] >> 
test.py::test[pg-tpcds-q84-default.txt-Results] >> test.py::test[key_filter-pushdown_keyextract_type_adjust-default.txt-Results] [GOOD] >> test.py::test[key_filter-uuid--Results] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[view-view_with_lambda--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] >> test.py::test[table_range-concat_empty_sorted_with_key_diff--Results] [GOOD] >> test.py::test[tpch-q17-default.txt-ForceBlocks] >> test.py::test[produce-process_with_lambda-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] >> test.py::test[optimizers-unused_columns_group_one_of_multi--Results] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] >> test.py::test[order_by-sort_with_take_limit--ForceBlocks] [GOOD] >> test.py::test[order_by-sort_with_take_limit--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] >> test.py::test[type_v3-append_struct-default.txt-Results] [GOOD] >> test.py::test[type_v3-decimal_yt_llvm--Results] >> test.py::test[window-row_number_to_map_noncompact-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] >> test.py::test[pg-insert--Results] [GOOD] >> test.py::test[pg-nulls-default.txt-Results] >> test.py::test[join-mergejoin_saves_output_sort_cross--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--ForceBlocks] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] >> test.py::test[order_by-order_by_dot_column-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_tuple-default.txt-Results] >> test.py::test[window-win_lead_in_mem-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-reduce_multi_in_ref--Results] [GOOD] >> test.py::test[produce-reduce_with_assume_in_subquery--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_assume_in_subquery--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_having--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_having--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--ForceBlocks] [SKIPPED] >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[window-win_lead_in_mem-default.txt-Results] >> test.py::test[in-huge_in-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_decimal--Results] [GOOD] >> test.py::test[blocks-combine_all_some_filter--ForceBlocks] >> test.py::test[optimizers-yql-5833-table_content--ForceBlocks] >> test.py::test[order_by-sort_with_take_limit--Results] [GOOD] >> test.py::test[pg-all_data--ForceBlocks] >> test.py::test[join-star_join_mirror--ForceBlocks] [GOOD] >> test.py::test[join-star_join_mirror--Results] >> test.py::test[window-row_number_no_part_multi_input-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] >> test.py::test[join-premap_common_inner_filter--Results] [GOOD] >> test.py::test[join-premap_common_multiparents--Results] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[produce-reduce_with_python_row_repack--Results] [SKIPPED] >> 
test.py::test[blocks-exists--ForceBlocks] [GOOD] >> test.py::test[flatten_by-flatten_corr_name_column-default.txt-Results] [GOOD] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] >> test.py::test[sampling-sample-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-sample-default.txt-Results] >> test.py::test[blocks-exists--Results] >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_ru_with_window_func--Results] [GOOD] >> test.py::test[aggregate-group_by_session_compact--Results] |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[window-win_lead_in_mem-default.txt-Results] [GOOD] >> test.py::test[ytflow-flatten_list--ForceBlocks] [SKIPPED] >> test.py::test[ytflow-flatten_list--Results] [SKIPPED] >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_narrows_output_sort--Results] >> test.py::test[window-row_number_to_map_noncompact-default.txt-Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--ForceBlocks] >> test.py::test[view-file_inner--ForceBlocks] [GOOD] >> test.py::test[view-file_inner--Results] >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[pg-tpcds-q84-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q88-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] >> test.py::test[window-full/session_aliases--Results] [GOOD] >> test.py::test[window-win_expr_bounds--Results] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[flatten_by-flatten_dict_by_opt--ForceBlocks] [GOOD] |54.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] |54.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[ytflow-flatten_list--Results] [SKIPPED] >> test.py::test[type_v3-decimal_yt_llvm--Results] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] >> test.py::test[tpch-q3-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] >> test.py::test[insert-yql-13083--ForceBlocks] [GOOD] >> test.py::test[insert-yql-13083--Results] >> test.py::test[aggregate-group_by_rollup_aggr_expr--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[tpch-q3-default.txt-Results] >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[join-lookupjoin_take_skip--ForceBlocks] [SKIPPED] >> test.py::test[join-lookupjoin_take_skip--Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-ForceBlocks] >> test.py::test[blocks-exists--Results] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--ForceBlocks] >> test.py::test[key_filter-utf8_with_legacy--ForceBlocks] [GOOD] >> 
test.py::test[key_filter-utf8_with_legacy--Results] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_all_list_stream--Results] [SKIPPED] >> test.py::test[produce-reduce_all_with_python_input_stream-dq_fail-Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[optimizers-yql-2582_limit_for_join_input--Results] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--ForceBlocks] [GOOD] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] >> test.py::test[sampling-sample-default.txt-Results] [GOOD] >> test.py::test[sampling-sort-default.txt-ForceBlocks] >> test.py::test[blocks-combine_hashed_some--Results] [GOOD] >> test.py::test[optimizers-yql-12620_stage_multiuse--Results] [GOOD] >> test.py::test[blocks-date_greater--Results] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] >> test.py::test[view-file_inner--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--ForceBlocks] |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[action-eval_atom_wrong_type_param--ForceBlocks] [SKIPPED] >> test.py::test[action-eval_atom_wrong_type_param--Results] [SKIPPED] >> test.py::test[action-evaluate_match_type-default.txt-ForceBlocks] >> test.py::test[aggregate-aggregate_with_default_yson_options-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_non_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-ForceBlocks] >> test.py::test[blocks-distinct_mixed_keys--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_all--Results] >> IcbAsActorTests::TestHttpPostReaction >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> test.py::test[window-row_number_no_part_multi_input-default.txt-Results] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-ForceBlocks] >> test.py::test[pg-tpch-q22-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_expr-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_combine--Results] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] >> test.py::test[order_by-order_by_tuple-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[optimizers-sort_over_sorted_prefix_keys-default.txt-Results] [GOOD] >> test.py::test[join-star_join_mirror--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap--ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_coalesce_without_left_semi_2-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--ForceBlocks] |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_with_different_key_names_nonsorted--Results] [GOOD] >> test.py::test[join-no_empty_join_for_dyn--ForceBlocks] [SKIPPED] >> 
test.py::test[join-no_empty_join_for_dyn--Results] |54.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/control/ut/unittest >> test.py::test[join-no_empty_join_for_dyn--Results] [SKIPPED] >> test.py::test[join-star_join_mirror-off-ForceBlocks] >> test.py::test[tpch-q17-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q17-default.txt-Results] >> test.py::test[insert-yql-13083--Results] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-ForceBlocks] >> test.py::test[join-mergejoin_narrows_output_sort--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] >> test.py::test[optimizers-combinebykey_fields_subset_range--Results] [GOOD] >> test.py::test[optimizers-flatmap_with_non_struct_out--ForceBlocks] >> test.py::test[flatten_by-flatten_with_subquery-default.txt-Results] [GOOD] >> test.py::test[hor_join-double_input-default.txt-Results] >> test.py::test[optimizers-flatmap_with_non_struct_out--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-flatmap_with_non_struct_out--Results] [SKIPPED] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-ForceBlocks] |54.9%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[tpch-q3-default.txt-Results] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-ForceBlocks] >> test.py::test[join-mergejoin_saves_output_sort_nested--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] >> test.py::test[optimizers-yql-5833-table_content--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] >> test.py::test[pg-tpch-q11-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q16-default.txt-Results] >> test.py::test[blocks-combine_all_some_filter--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_some_filter--Results] >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[type_v3-decimal_yt_nollvm--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs--Results] >> test.py::test[pg-nulls-default.txt-Results] [GOOD] >> test.py::test[pg-select_from_columns-default.txt-Results] >> test.py::test[pg-all_data--ForceBlocks] [GOOD] >> test.py::test[pg-all_data--Results] >> test.py::test[pg-tpcds-q88-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q05-default.txt-Results] >> test.py::test[aggregate-group_by_rollup_aggr_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--ForceBlocks] >> test.py::test[key_filter-uuid--Results] [GOOD] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] >> test.py::test[simple_columns-simple_columns_join_fail--ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_fail--Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-ForceBlocks] |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[key_filter-utf8_with_legacy--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] >> test.py::test[blocks-mul_uint64_opt2--ForceBlocks] [GOOD] >> test.py::test[blocks-mul_uint64_opt2--Results] >> test.py::test[bigdate-table_explicit_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] >> test.py::test[window-win_expr_bounds--Results] [GOOD] >> 
test.py::test[window-win_func_aggr_4func_sort--Results] >> test.py::test[blocks-combine_all_some_filter--Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum--ForceBlocks] >> test.py::test[window-win_func_rank_by_all--Results] >> test.py::test[in-huge_in-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-huge_in-default.txt-Results] >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-5833-table_content--Results] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--ForceBlocks] >> test.py::test[sampling-sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-sort-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map_combine--ForceBlocks] [GOOD] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] >> test.py::test[aggregate-group_by_session_compact--Results] [GOOD] >> test.py::test[aggregate-group_by_session_extended--Results] >> test.py::test[action-evaluate_match_type-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-evaluate_match_type-default.txt-Results] >> test.py::test[sampling-bind_expr-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_join_right-default.txt-Results] [SKIPPED] >> test.py::test[sampling-join_left_sample-default.txt-Results] >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort_nested--Results] [GOOD] >> test.py::test[join-premap_common_multiparents-off-ForceBlocks] >> test.py::test[blocks-mul_uint64_opt2--Results] [GOOD] >> test.py::test[blocks-not_opt--ForceBlocks] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[tpch-q17-default.txt-Results] [GOOD] >> test.py::test[pg-doubles_search_path-default.txt-Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-Results] >> test.py::test[join-star_join_semionly_premap--ForceBlocks] [GOOD] >> test.py::test[join-star_join_semionly_premap--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_partial_uniq_keys-off-ForceBlocks] >> test.py::test[action-action_nested_query-default.txt-Results] >> test.py::test[optimizers-test_fuse_map_take-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] >> test.py::test[hor_join-double_input-default.txt-Results] [GOOD] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] >> test.py::test[join-mapjoin_unused_keys--Results] >> test.py::test[produce-reduce_multi_in_stage_and_flatmap--Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] >> test.py::test[join-mapjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-premap_common_multiparents--Results] [GOOD] >> test.py::test[join-premap_common_multiparents_no_premap-off-Results] [SKIPPED] >> test.py::test[join-premap_nonseq_flatmap--Results] >> test.py::test[join-mergejoin_with_table_range-off-Results] >> test.py::test[join-mergejoin_with_table_range-off-Results] [SKIPPED] >> test.py::test[join-nested_semi_join-off-Results] [SKIPPED] >> test.py::test[join-premap_common_inner-off-Results] [SKIPPED] >> 
test.py::test[join-premap_map_cross--Results] |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[pg-all_data--Results] [GOOD] >> test.py::test[window-win_func_rank_by_all--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-ForceBlocks] >> test.py::test[sampling-sort-default.txt-Results] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-ForceBlocks] >> test.py::test[insert_monotonic-keep_meta-default.txt-ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] >> test.py::test[pg-select_from_columns-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q21-default.txt-Results] >> test.py::test[key_filter-empty_range_over_dynamic--Results] |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part9/pytest >> test.py::test[join-mapjoin_on_complex_type_optional_left_only_single-off-Results] [SKIPPED] >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[udf-named_args_for_script_with_posargs--Results] [GOOD] >> test.py::test[union_all-infer_3-default.txt-Results] >> test.py::test[weak_field-optimize_weak_fields_map_combine--Results] [GOOD] >> test.py::test[weak_field-weak_field_join_where--ForceBlocks] >> test.py::test[action-evaluate_match_type-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-ForceBlocks] >> test.py::test[distinct-distinct_star-default.txt-Results] >> test.py::test[union_all-mix_map_and_project-trivial_map-ForceBlocks] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> test.py::test[in-huge_in-default.txt-Results] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] >> test.py::test[insert-part_sortness-desc-Results] >> test.py::test[key_filter-yql_5895_or-default.txt-Results] [GOOD] >> test.py::test[like-like_clause-default.txt-Results] >> test.py::test[aggregate-group_by_column_alias_reuse_for_join--Results] [GOOD] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] >> test.py::test[bigdate-tz_table_rw--Results] |54.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |54.9%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |54.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> test.py::test[bigdate-table_explicit_cast-default.txt-Results] [GOOD] >> test.py::test[bigdate-tz_table_pull--ForceBlocks] >> test.py::test[optimizers-yql-5978_fill_multi_usage--Results] [GOOD] >> test.py::test[optimizers-yt_shuffle_by_keys--Results] [SKIPPED] >> test.py::test[order_by-assume_with_filter--Results] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] >> test.py::test[insert_monotonic-keep_meta-default.txt-Results] [GOOD] >> test.py::test[join-bush_dis_in--ForceBlocks] >> test.py::test[window-win_func_auto_arg-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_func_auto_arg-default.txt-Results] >> test.py::test[join-star_join_mirror-off-ForceBlocks] [GOOD] |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part7/pytest >> test.py::test[weak_field-optimize_weak_fields_filter_combine--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_take-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] >> test.py::test[join-star_join_semionly_premap--Results] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-ForceBlocks] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-ForceBlocks] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test.py::test[order_by-literal_with_assume_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-literal_with_assume_desc--Results] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-ForceBlocks] |54.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> test.py::test[blocks-not_opt--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum--ForceBlocks] [GOOD] |54.9%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |54.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[join-star_join_mirror-off-ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_sum--Results] >> test.py::test[blocks-not_opt--Results] >> test.py::test[aggregate-group_by_session_extended_subset--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_session_extended_subset--Results] |54.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |54.9%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |54.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |54.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |54.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} 
$(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> test.py::test[window-win_func_auto_arg_selective_rank-default.txt-Results] [GOOD] >> test.py::test[ypath-empty_range-dynamic-Results] [SKIPPED] >> test.py::test[ytflow-file--Results] >> test.py::test[union_all-infer_3-default.txt-Results] [GOOD] >> test.py::test[view-file_inner--Results] >> test.py::test[action-action_nested_query-default.txt-Results] [GOOD] >> test.py::test[action-dep_world_action_quote-default.txt-Results] >> test.py::test[ytflow-file--Results] [SKIPPED] >> test.py::test[ytflow-flatten_list--Results] [SKIPPED] >> test.py::test[ytflow-udf--Results] |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> test.py::test[ql_filter-integer_escaping--Results] [GOOD] >> test.py::test[sampling-bind_join_left-default.txt-Results] [SKIPPED] >> test.py::test[pg-tpcds-q21-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q23-default.txt-Results] >> test.py::test[ytflow-udf--Results] [SKIPPED] >> test.py::test[sampling-subquery_limit-default.txt-Results] >> test.py::test[sampling-subquery_sort-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-subquery_sort-default.txt-Results] |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> test.py::test[case-case_then_else-default.txt-Results] >> test.py::test[order_by-literal_with_assume_desc--Results] [GOOD] >> test.py::test[optimizers-yql-7532_wrong_field_subset_for_calcoverwindow-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_over_input_desc--ForceBlocks] >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_over_group_by--Results] >> test.py::test[order_by-order_by_expr--ForceBlocks] >> test.py::test[key_filter-empty_range_over_dynamic--Results] [GOOD] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tools/query_replay_yt/query_replay_yt |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |55.0%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> test.py::test[join-premap_common_multiparents-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_multiparents-off-Results] [SKIPPED] >> test.py::test[join-premap_common_semi--ForceBlocks] >> test.py::test[pg-join_using_tables1-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-Results] >> test.py::test[sampling-join_left_sample-default.txt-Results] [GOOD] |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |55.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> test.py::test[join-mapjoin_partial_uniq_keys-off-ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] >> test.py::test[window-win_func_auto_arg-default.txt-Results] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--ForceBlocks] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] >> test.py::test[aggregate-group_by_session_extended--Results] [GOOD] >> test.py::test[aggregate-no_compact_distinct--Results] [SKIPPED] >> test.py::test[aggregate-percentile_and_variance--Results] >> test.py::test[blocks-not_opt--Results] [GOOD] >> test.py::test[blocks-string_pass--ForceBlocks] >> test.py::test[join-mapjoin_partial_uniq_keys-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_force_align2--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] >> test.py::test[bigdate-tz_table_rw--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> test.py::test[like-like_clause-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_join_without_resolve_dublicates_mult-default.txt-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-ForceBlocks] >> test.py::test[insert-part_sortness-desc-Results] [GOOD] >> test.py::test[join-anyjoin_common_nodup--Results] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] >> test.py::test[window-win_multiaggr-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] |55.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part8/pytest >> test.py::test[ytflow-udf--Results] [SKIPPED] |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] >> test.py::test[order_by-assume_with_filter--Results] [GOOD] >> test.py::test[join-premap_map_cross--Results] [GOOD] >> test.py::test[join-premap_map_cross-off-Results] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[order_by-order_by_tablerecord_column--Results] |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview >> test.py::test[pg-tpch-q05-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q19-default.txt-Results] >> test.py::test[join-premap_map_inner--Results] >> test.py::test[sampling-subquery_sort-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-ForceBlocks] [GOOD] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] >> test.py::test[blocks-combine_all_sum--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--ForceBlocks] >> test.py::test[sampling-system_sampling--ForceBlocks] [SKIPPED] >> test.py::test[sampling-system_sampling--Results] [SKIPPED] >> test.py::test[schema-copy-other-ForceBlocks] >> test.py::test[blocks-distinct_opt_state_all--Results] [GOOD] >> test.py::test[blocks-interval_add_interval_scalar--Results] 
|55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots >> test.py::test[bigdate-tz_table_pull--ForceBlocks] [GOOD] >> test.py::test[bigdate-tz_table_pull--Results] >> test.py::test[aggregate-group_by_session_extended_subset--Results] [GOOD] >> test.py::test[aggregate-percentile_and_avg_grouped--ForceBlocks] >> test.py::test[join-premap_nonseq_flatmap--Results] [GOOD] >> test.py::test[join-pullup_context_dep--Results] >> test.py::test[blocks-date_greater--Results] [GOOD] >> test.py::test[blocks-date_not_equals--Results] >> test.py::test[join-mergejoin_saves_output_sort-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] >> test.py::test[join-mergejoin_saves_output_sort-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--ForceBlocks] [SKIPPED] >> test.py::test[join-mergejoin_saves_output_sort_unmatched--Results] [SKIPPED] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--ForceBlocks] [SKIPPED] >> test.py::test[weak_field-weak_field_join_where--ForceBlocks] [GOOD] >> test.py::test[weak_field-weak_field_join_where--Results] >> test.py::test[join-mergejoin_sorts_output_for_sort_nomatch--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] >> test.py::test[hor_join-empty_out_hor_join-default.txt-Results] [GOOD] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] >> test.py::test[hor_join-fuse_multi_usage-outlimit-Results] [SKIPPED] >> test.py::test[hor_join-out_table_record-default.txt-Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] >> test.py::test[bigdate-tz_table_pull--Results] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] >> test.py::test[join-star_join_semionly_premap-off-ForceBlocks] [GOOD] >> test.py::test[join-star_join_semionly_premap-off-Results] [SKIPPED] >> test.py::test[action-subquery_merge_nested_world-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-ForceBlocks] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> test.py::test[ypath-complex-default.txt-ForceBlocks] >> test.py::test[join-yql-12022--ForceBlocks] >> test.py::test[select-from_in_front_sub-default.txt-Results] >> test.py::test[join-bush_dis_in--ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in--Results] >> test.py::test[view-file_inner--Results] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] >> test.py::test[pg-tpch-q16-default.txt-Results] [GOOD] >> test.py::test[pragma-release_temp_data_chain_pull_fail--Results] [SKIPPED] >> test.py::test[produce-process_multi_in--Results] >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[sampling-subquery_limit-default.txt-Results] [GOOD] >> test.py::test[sampling-take_with_sampling-default.txt-Results] >> test.py::test[action-combine_subqueries_with_table_param-default.txt-Results] [GOOD] >> 
test.py::test[case-case_then_else-default.txt-Results] [GOOD] >> test.py::test[coalesce-coalesce_few_real-default.txt-Results] >> test.py::test[action-eval_if-default.txt-Results] |55.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |55.1%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> test.py::test[pg-tpcds-q97-default.txt-Results] >> test.py::test[union_all-union_all_with_discard_into_result_ansi-default.txt-Results] [GOOD] >> test.py::test[pg-select_limit-default.txt-Results] [GOOD] >> test.py::test[pg-select_table2-default.txt-Results] >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> test.py::test[binding-table_from_binding-default.txt-Results] >> test.py::test[weak_field-yql-7888_mapfieldsubset--ForceBlocks] >> test.py::test[key_filter-range_union_lower_excluded-default.txt-Results] [GOOD] >> test.py::test[key_filter-string_with_legacy--Results] >> test.py::test[order_by-order_by_expr--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_expr--Results] >> test.py::test[join-premap_common_semi--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_semi--Results] >> test.py::test[blocks-string_pass--ForceBlocks] [GOOD] >> test.py::test[blocks-string_pass--Results] >> test.py::test[optimizers-yql-18733_no_filter_multiusage_pushdown--Results] [GOOD] >> test.py::test[order_by-native_desc_sort-over_sorted-ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_sort-over_sorted-Results] [SKIPPED] >> test.py::test[pg-join_using_multiple2--ForceBlocks] >> test.py::test[like-like_clause_no_pattern-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] >> test.py::test[pg-tpcds-q23-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q24-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> test.py::test[insert-insert_from_other--Results] >> test.py::test[weak_field-weak_field_join_where--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--ForceBlocks] >> test.py::test[order_by-assume_over_input_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_over_input_desc--Results] >> test.py::test[blocks-interval_add_interval_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[produce-process_multi_in--Results] [SKIPPED] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-ForceBlocks] [GOOD] >> test.py::test[union_all-mix_map_and_project-trivial_map-Results] [GOOD] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] >> test.py::test[sampling-yql-14664_deps-default.txt-Results] [GOOD] >> test.py::test[sampling-zero_percentage--Results] >> test.py::test[order_by-order_by_tablerecord_column--Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> test.py::test[schema-copy-other-ForceBlocks] [GOOD] >> test.py::test[schema-copy-other-Results] >> test.py::test[join-premap_map_inner--Results] [GOOD] >> test.py::test[join-premap_merge_extrasort2-off-Results] [SKIPPED] >> 
test.py::test[pg-select_table1-default.txt-ForceBlocks] >> test.py::test[join-pullup_renaming-off-Results] [SKIPPED] >> test.py::test[join-pullup_rownumber-off-Results] [SKIPPED] >> test.py::test[join-simple_columns_partial-off-Results] >> test.py::test[action-dep_world_action_quote-default.txt-Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] >> test.py::test[order_by-order_by_expr--Results] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-ForceBlocks] >> test.py::test[join-simple_columns_partial-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners-off-Results] [SKIPPED] >> test.py::test[join-star_join_inners_vk_sorted--Results] >> test.py::test[order_by-assume_over_input_desc--Results] [GOOD] >> test.py::test[blocks-string_pass--Results] [GOOD] >> test.py::test[aggregate-too_wide-default.txt-Results] [GOOD] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] >> test.py::test[aggregate-group_by_cube_expr_trio--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] >> test.py::test[order_by-native_desc_publish--ForceBlocks] [SKIPPED] >> test.py::test[order_by-native_desc_publish--Results] >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[join-bush_dis_in--Results] [GOOD] >> test.py::test[join-bush_dis_in-off-ForceBlocks] >> test.py::test[order_by-native_desc_publish--Results] [SKIPPED] >> test.py::test[pg-select_common_type_unionall--ForceBlocks] >> test.py::test[aggregate-percentile_and_avg_grouped--ForceBlocks] [GOOD] >> test.py::test[action-eval_if-default.txt-Results] [GOOD] >> test.py::test[action-eval_sample--Results] >> test.py::test[aggregate-percentile_and_avg_grouped--Results] >> test.py::test[binding-table_from_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-combine_all_min--Results] >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] [GOOD] >> test.py::test[ypath-complex-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] >> test.py::test[distinct-distinct_star-default.txt-Results] [GOOD] >> test.py::test[dq-blacklisted_pragmas1--Results] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_nested-off-Results] [SKIPPED] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] >> test.py::test[join-premap_common_semi--Results] [GOOD] >> test.py::test[join-premap_common_semi-off-ForceBlocks] >> test.py::test[join-yql-12022--ForceBlocks] [GOOD] >> test.py::test[join-yql-12022--Results] >> test.py::test[dq-blacklisted_pragmas1--Results] [SKIPPED] >> test.py::test[dq-precompute_parallel--Results] [SKIPPED] >> test.py::test[dq-truncate_local-default.txt-Results] [SKIPPED] >> test.py::test[expr-inline_call--Results] >> test.py::test[pg-tpcds-q97-default.txt-Results] [GOOD] >> test.py::test[hor_join-out_table_record-default.txt-Results] [GOOD] >> test.py::test[hor_join-runtime_dep-default.txt-Results] >> test.py::test[pg-tpch-q07-default.txt-Results] >> test.py::test[blocks-distinct_opt_state_keys--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--ForceBlocks] [GOOD] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] >> test.py::test[sampling-take_with_sampling-default.txt-Results] [GOOD] >> test.py::test[schema-def_values_job--Results] >> 
test.py::test[coalesce-coalesce_few_real-default.txt-Results] [GOOD] >> test.py::test[column_group-groups-single-Results] >> test.py::test[pg-tpch-q19-default.txt-Results] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] >> test.py::test[schema-copy-other-Results] [GOOD] >> test.py::test[schema-copy-yamred_dsv_raw-ForceBlocks] >> test.py::test[join-pullup_context_dep--Results] [GOOD] >> test.py::test[join-pullup_left_semi--Results] >> test.py::test[column_group-groups-single-Results] [SKIPPED] >> test.py::test[column_group-hint-single-Results] [SKIPPED] >> test.py::test[column_group-hint_append--Results] [SKIPPED] >> test.py::test[column_group-hint_diff_grp_fail3--Results] [SKIPPED] >> test.py::test[column_group-hint_non_map_yson_fail--Results] [SKIPPED] >> test.py::test[column_order-join--Results] [SKIPPED] >> test.py::test[column_order-winfunc-default.txt-Results] |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[blocks-string_pass--Results] [GOOD] |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[aggregate-percentile_and_variance--Results] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] >> test.py::test[binding-table_filter_binding-default.txt-ForceBlocks] [GOOD] >> test.py::test[binding-table_filter_binding-default.txt-Results] >> test.py::test[simple_columns-simple_columns_tablerow-default.txt-Results] [GOOD] >> test.py::test[table_range-prune_partition_list_basic--ForceBlocks] |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[join-mergejoin_with_different_key_names--ForceBlocks] [GOOD] >> test.py::test[table_range-prune_partition_list_basic--ForceBlocks] [SKIPPED] >> test.py::test[table_range-prune_partition_list_basic--Results] [SKIPPED] >> test.py::test[table_range-prune_partition_list_dependson--ForceBlocks] [SKIPPED] >> test.py::test[table_range-prune_partition_list_dependson--Results] >> test.py::test[table_range-prune_partition_list_dependson--Results] [SKIPPED] >> test.py::test[table_range-tablepath_with_non_existing--ForceBlocks] >> test.py::test[pg-select_table2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q04-default.txt-Results] >> test.py::test[window-win_func_over_group_by--Results] [GOOD] >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] [GOOD] >> test.py::test[window-full/syscolumns--Results] >> test.py::test[window-win_multiaggr-default.txt-Results] >> test.py::test[insert-insert_from_other--Results] [GOOD] >> test.py::test[insert-override_view_fail--Results] >> test.py::test[aggr_factory-count_if-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-count_if-default.txt-Results] >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[action-eval_if_guard-default.txt-Results] [GOOD] >> test.py::test[limit-empty_sort_after_limit-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--Results] >> test.py::test[pg-join_using_multiple2--ForceBlocks] [GOOD] >> test.py::test[produce-process_multi_out_bad_count_fail--Results] [GOOD] >> test.py::test[produce-process_sorted_multi_out--Results] >> test.py::test[pg-join_using_multiple2--Results] >> test.py::test[aggr_factory-avg-default.txt-Results] |55.1%| [CC] 
{default-linux-x86_64, relwithdebinfo} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp >> test.py::test[binding-table_filter_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint32--ForceBlocks] >> test.py::test[produce-process_sorted_multi_out--Results] [SKIPPED] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part5/pytest >> test.py::test[ypath-complex-default.txt-Results] [GOOD] >> test.py::test[join-yql-12022--Results] [GOOD] >> test.py::test[sampling-zero_percentage--Results] [GOOD] >> test.py::test[schema-concat--Results] >> test.py::test[join-yql-14829_left--ForceBlocks] >> TSchemeShardSysViewTest::ReadOnlyMode >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> test.py::test[blocks-partial_blocks1--Results] |55.1%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp >> test.py::test[aggregate-percentile_and_avg_grouped--Results] [GOOD] >> test.py::test[aggregate-percentiles_grouped_expr--ForceBlocks] >> test.py::test[pg-select_table1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_table1-default.txt-Results] >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] >> test.py::test[select-from_in_front_sub-default.txt-Results] [GOOD] >> test.py::test[select-hits_count--Results] >> test.py::test[insert-override_view_fail--Results] [GOOD] >> test.py::test[insert-replace_inferred--Results] >> test.py::test[key_filter-string_with_legacy--Results] [GOOD] >> test.py::test[like-ilike_clause-default.txt-Results] >> test.py::test[union_all-union_all_with_limits-default.txt-Results] [GOOD] >> test.py::test[view-file_outer--Results] >> test.py::test[dq-dq_replicate_ok-default.txt-ForceBlocks] >> test.py::test[window-win_func_lead_lag_worm_with_part_other--Results] [GOOD] >> test.py::test[window-win_func_rank_by_part--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:02:50.867192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:02:50.867221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:02:50.867230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:02:50.867239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:02:50.867246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:02:50.867251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:02:50.867261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:02:50.867276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:02:50.867392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:02:50.867458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:02:50.886792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:02:50.886826Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:02:50.890816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:02:50.890914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:02:50.890962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:02:50.892709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:02:50.892808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:02:50.892933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:02:50.893153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:02:50.893643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:02:50.893697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:02:50.893964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:02:50.893975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:02:50.894001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:02:50.894010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:02:50.894018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:02:50.894043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-11-03T14:02:50.931920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2025-11-03T14:02:50.932335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
vel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:02:51.213564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-11-03T14:02:51.213608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2025-11-03T14:02:51.213755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:02:51.213779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:02:51.213791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2025-11-03T14:02:51.213825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 102:0 128 -> 240 2025-11-03T14:02:51.213857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2025-11-03T14:02:51.213870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-11-03T14:02:51.214326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:02:51.214338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-11-03T14:02:51.214426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2025-11-03T14:02:51.214448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:02:51.214453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:817:2775], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-11-03T14:02:51.214460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:817:2775], at schemeshard: 72057594046678944, txId: 102, path id: 38 2025-11-03T14:02:51.214564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2025-11-03T14:02:51.214572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2025-11-03T14:02:51.214587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-11-03T14:02:51.214592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-11-03T14:02:51.214597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 1/1 2025-11-03T14:02:51.214600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-11-03T14:02:51.214605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-11-03T14:02:51.214611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-11-03T14:02:51.214616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-11-03T14:02:51.214621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-11-03T14:02:51.214658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2025-11-03T14:02:51.214668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-11-03T14:02:51.214674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2025-11-03T14:02:51.214678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2025-11-03T14:02:51.214793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-11-03T14:02:51.214808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2025-11-03T14:02:51.214814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-11-03T14:02:51.214819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2025-11-03T14:02:51.214825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2025-11-03T14:02:51.214977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, 
cookie: 102 2025-11-03T14:02:51.214993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-11-03T14:02:51.214997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-11-03T14:02:51.215002Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2025-11-03T14:02:51.215007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2025-11-03T14:02:51.215018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-11-03T14:02:51.215840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:02:51.215906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-11-03T14:02:51.215981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-11-03T14:02:51.215991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-11-03T14:02:51.216069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-11-03T14:02:51.216086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-11-03T14:02:51.216092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:846:2802] TestWaitNotification: OK eventTxId 102 2025-11-03T14:02:51.216177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:02:51.216223Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 57us result status StatusSuccess 2025-11-03T14:02:51.216339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[pg-tpcds-q24-default.txt-Results] [GOOD] >> test.py::test[weak_field-yql-7888_mapfieldsubset--ForceBlocks] [GOOD] >> test.py::test[expr-inline_call--Results] [GOOD] >> test.py::test[expr-tagged_runtime_null-default.txt-Results] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] >> test.py::test[ansi_idents-basic_columns-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_yt_native-default-Results] [SKIPPED] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] |55.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |55.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> test.py::test[schema-copy-yamred_dsv_raw-ForceBlocks] [GOOD] >> test.py::test[action-eval_sample--Results] [GOOD] >> test.py::test[action-eval_values_output_table_subquery--Results] >> test.py::test[schema-copy-yamred_dsv_raw-Results] >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[join-premap_common_semi-off-ForceBlocks] [GOOD] >> test.py::test[join-premap_common_semi-off-Results] [SKIPPED] >> test.py::test[join-pullup_extend--ForceBlocks] >> test.py::test[pg-select_table1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q46-default.txt-ForceBlocks] >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[pg-join_using_tables3-default.txt-Results] |55.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |55.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] >> test.py::test[optimizers-fuse_map_mapreduce_multi_input--Results] [SKIPPED] >> test.py::test[optimizers-sort_by_nonstrict_const--ForceBlocks] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--ForceBlocks] >> test.py::test[aggr_factory-count_if-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-ForceBlocks] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_expr_mul_col--Results] >> test.py::test[optimizers-sort_by_nonstrict_const--ForceBlocks] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in--Results] >> test.py::test[optimizers-sort_by_nonstrict_const--Results] [SKIPPED] >> test.py::test[optimizers-unused_columns_window--ForceBlocks] >> test.py::test[table_range-tablepath_with_non_existing--ForceBlocks] [GOOD] >> test.py::test[table_range-tablepath_with_non_existing--Results] >> test.py::test[join-bush_dis_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in-off-Results] [SKIPPED] >> test.py::test[join-bush_in_in-off-ForceBlocks] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-ForceBlocks] [GOOD] >> test.py::test[view-file_outer--Results] [GOOD] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] >> test.py::test[view-file_outer_library--Results] >> test.py::test[window-current/ansi_current_with_win--ForceBlocks] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> TStreamingQueryTestReboots::SimpleDropStreamingQuery |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[schema-def_values_job--Results] [GOOD] >> test.py::test[blocks-date_not_equals--Results] [GOOD] >> test.py::test[blocks-decimal_multiplicative_ops--Results] >> test.py::test[schema-copy-yamred_dsv_raw-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-ForceBlocks] >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> test.py::test[limit-insert_with_limit--Results] [GOOD] >> test.py::test[limit-limit--Results] >> test.py::test[pg-join_using_multiple2--Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] >> test.py::test[blocks-combine_all_min--Results] [GOOD] >> test.py::test[blocks-combine_all_sum_filter--Results] >> test.py::test[column_order-winfunc-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] >> test.py::test[pg-tpcds-q04-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-Results] >> test.py::test[blocks-partial_blocks1--Results] [GOOD] >> test.py::test[join-pullup_left_semi--Results] [GOOD] >> test.py::test[join-pullup_null_column--Results] >> test.py::test[weak_field-yql-7888_mapfieldsubset--Results] [GOOD] >> test.py::test[window-win_multiaggr-default.txt-Results] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> test.py::test[schema-concat--Results] [GOOD] >> test.py::test[schema-copy-schema-Results] >> test.py::test[window-full/syscolumns--ForceBlocks] |55.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> 
ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] >> test.py::test[join-mergejoin_with_different_key_names_norename--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] >> test.py::test[join-anyjoin_common_nodup--Results] [GOOD] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-count_bans--Results] >> test.py::test[blocks-add_uint32--ForceBlocks] [GOOD] >> test.py::test[blocks-add_uint32--Results] >> test.py::test[in-in_scalar_vector_subquery-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] >> test.py::test[table_range-tablepath_with_non_existing--Results] [GOOD] >> test.py::test[tpch-q4-default.txt-ForceBlocks] >> test.py::test[insert-replace_inferred--Results] [GOOD] >> test.py::test[insert-trivial_literals-default.txt-Results] >> test.py::test[binding-table_filter_strict_binding-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint64_opt--Results] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> test.py::test[pg-select_common_type_unionall--ForceBlocks] [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[hor_join-runtime_dep-default.txt-Results] [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> test.py::test[pg-select_common_type_unionall--Results] >> test.py::test[join-star_join_inners_vk_sorted--Results] [GOOD] >> test.py::test[join-star_join_with_diff_complex_key--Results] [SKIPPED] >> test.py::test[join-yql-10654_pullup_with_sys_columns-off-Results] [SKIPPED] >> test.py::test[join-yql-14829_left-off-Results] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> test.py::test[view-file_outer_library--Results] [GOOD] >> test.py::test[view-secure--Results] >> test.py::test[join-yql-14829_left-off-Results] [SKIPPED] >> test.py::test[join-yql-8131-off-Results] [SKIPPED] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> test.py::test[like-ilike_clause-default.txt-Results] [GOOD] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] |55.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part1/pytest >> test.py::test[blocks-partial_blocks1--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-11-03T14:02:56.195592Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.195601Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.195606Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.195741Z :DEBUG: [db] [sessionid] [cluster] Successfully 
connected. Initializing session 2025-11-03T14:02:56.195916Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-11-03T14:02:56.195943Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.196643Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.196648Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.196652Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.196714Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:56.196758Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-11-03T14:02:56.196768Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.196976Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.196979Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.196982Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.197054Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-11-03T14:02:56.197068Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197072Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197110Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-11-03T14:02:56.197326Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197330Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197333Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.197407Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-11-03T14:02:56.197413Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197416Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197424Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-11-03T14:02:56.197648Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197653Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-11-03T14:02:56.197656Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.197706Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:56.197814Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:56.198914Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-11-03T14:02:56.198983Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:56.199064Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-11-03T14:02:56.199467Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-11-03T14:02:56.199528Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:56.199536Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:02:56.199541Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-11-03T14:02:56.199545Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-11-03T14:02:56.199551Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-11-03T14:02:56.199554Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-11-03T14:02:56.199558Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-11-03T14:02:56.199562Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-11-03T14:02:56.199569Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-11-03T14:02:56.199573Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-11-03T14:02:56.199577Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-11-03T14:02:56.199582Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-11-03T14:02:56.199586Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-11-03T14:02:56.199589Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-11-03T14:02:56.199593Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-11-03T14:02:56.199597Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-11-03T14:02:56.199607Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-11-03T14:02:56.199610Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-11-03T14:02:56.199614Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-11-03T14:02:56.199618Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-11-03T14:02:56.199621Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-11-03T14:02:56.199625Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-11-03T14:02:56.199629Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-11-03T14:02:56.199633Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-11-03T14:02:56.199637Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-11-03T14:02:56.199641Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-11-03T14:02:56.199644Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-11-03T14:02:56.199648Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-11-03T14:02:56.199652Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-11-03T14:02:56.199656Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-11-03T14:02:56.199660Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-11-03T14:02:56.199664Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-11-03T14:02:56.199680Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-11-03T14:02:56.199683Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-11-03T14:02:56.199688Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-11-03T14:02:56.199691Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-11-03T14:02:56.199697Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-11-03T14:02:56.199701Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-11-03T14:02:56.199705Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-11-03T14:02:56.199708Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-11-03T14:02:56.199712Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-11-03T14:02:56.199716Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-11-03T14:02:56.199720Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-11-03T14:02:56.199724Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-11-03T14:02:56.199727Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-11-03T14:02:56.199732Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-11-03T14:02:56.199736Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-11-03T14:02:56.199739Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-11-03T14:02:56.199743Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-11-03T14:02:56.199748Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-11-03T14:02:56.199758Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-11-03T14:02:56.199831Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-11-03T14:02:56.199860Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-11-03T14:02:56.199865Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-11-03T14:02:56.199870Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-11-03T14:02:56.199874Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-11-03T14:02:56.199879Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-11-03T14:02:56.199883Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-11-03T14:02:56.199886Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-11-03T14:02:56.199890Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-11-03T14:02:56.199895Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-11-03T14:02:56.199899Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-11-03T14:02:56.199903Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-11-03T14:02:56.199907Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-11-03T14:02:56.199911Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-11-03T14:02:56.199915Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-11-03T14:02:56.199918Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-11-03T14:02:56.199923Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-11-03T14:02:56.199930Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-11-03T14:02:56.199933Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-11-03T14:02:56.199937Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-11-03T14:02:56.199941Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-11-03T14:02:56.199945Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-11-03T14:02:56.199949Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-11-03T14:02:56.199953Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-11-03T14:02:56.199957Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-11-03T14:02:56.199961Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-11-03T14:02:56.199964Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-11-03T14:02:56.199968Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-11-03T14:02:56.199972Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-11-03T14:02:56.199976Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-11-03T14:02:56.199980Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-11-03T14:02:56.199984Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-11-03T14:02:56.199988Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-11-03T14:02:56.199997Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-11-03T14:02:56.200002Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-11-03T14:02:56.200006Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-11-03T14:02:56.200011Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-11-03T14:02:56.200014Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-11-03T14:02:56.200018Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-11-03T14:02:56.200022Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-11-03T14:02:56.200026Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-11-03T14:02:56.200029Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-11-03T14:02:56.200033Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-11-03T14:02:56.200037Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-11-03T14:02:56.200041Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-11-03T14:02:56.200045Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-11-03T14:02:56.200048Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-11-03T14:02:56.200052Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-11-03T14:02:56.200056Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-11-03T14:02:56.200060Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-11-03T14:02:56.200064Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-11-03T14:02:56.200069Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-11-03T14:02:56.200096Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-11-03T14:02:56.200353Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.200357Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.200360Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.200437Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-11-03T14:02:56.200509Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:56.200541Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.200597Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:56.301830Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.301944Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-11-03T14:02:56.301979Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:56.301987Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-11-03T14:02:56.302018Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-11-03T14:02:56.506518Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-11-03T14:02:56.606732Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-11-03T14:02:56.606787Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-11-03T14:02:56.606832Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-11-03T14:02:56.607275Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.607280Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.607284Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.607361Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:56.607557Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:56.607635Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.609164Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:56.710934Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.711214Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-11-03T14:02:56.711237Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:56.711246Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-11-03T14:02:56.711274Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-11-03T14:02:56.711301Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-11-03T14:02:56.711378Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-11-03T14:02:56.711401Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-11-03T14:02:56.711429Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> test.py::test[action-eval_values_output_table_subquery--Results] [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] >> test.py::test[order_by-order_by_expr_with_deps-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--ForceBlocks] >> Compression::WriteRAW |55.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[join-yql-8131-off-Results] [SKIPPED] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-11-03T14:02:56.894377Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.894388Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.894393Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.910720Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-11-03T14:02:56.910759Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.910765Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.910812Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007150s 2025-11-03T14:02:56.921443Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:56.932050Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-11-03T14:02:56.932124Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.943060Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.943070Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.943074Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.954696Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-11-03T14:02:56.954723Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.954728Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.954752Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005391s 2025-11-03T14:02:56.956975Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-11-03T14:02:56.958751Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-11-03T14:02:56.958790Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.962960Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.962969Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.962973Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:56.972202Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-11-03T14:02:56.972225Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.972232Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:56.972257Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.235265s 2025-11-03T14:02:56.979291Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:56.994884Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-11-03T14:02:56.994938Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.002994Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.003003Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.003007Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:57.010713Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-11-03T14:02:57.010737Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.010743Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.010769Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.173146s 2025-11-03T14:02:57.010963Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:57.014756Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-11-03T14:02:57.014795Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.031008Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.031018Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.031023Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:57.038693Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:57.048133Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:57.049920Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.050080Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-11-03T14:02:57.050088Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.050092Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.050100Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.231494s 2025-11-03T14:02:57.050199Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-11-03T14:02:57.068619Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.068629Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.068636Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:57.074716Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:57.086811Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:57.086912Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.090687Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:57.254714Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.258737Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-11-03T14:02:57.258780Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:57.258791Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-11-03T14:02:57.258822Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-11-03T14:02:57.366821Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-11-03T14:02:57.366897Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-11-03T14:02:57.379028Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.379038Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.379043Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:57.382710Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:57.394753Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:57.418083Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.418329Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:57.518781Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:57.518847Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-11-03T14:02:57.518866Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:57.518874Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-11-03T14:02:57.518903Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-11-03T14:02:57.518926Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-11-03T14:02:57.523071Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2025-11-03T14:02:57.523113Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-11-03T14:02:57.523164Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> test.py::test[blocks-add_uint32--Results] [GOOD] >> test.py::test[blocks-add_uint8--ForceBlocks] >> test.py::test[dq-dq_replicate_ok-default.txt-ForceBlocks] [GOOD] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] >> test.py::test[expr-tagged_runtime_null-default.txt-Results] [GOOD] >> test.py::test[file-where_key_in_file_content_typed--Results] >> test.py::test[dq-dq_replicate_ok-default.txt-Results] [SKIPPED] >> test.py::test[expr-as_table_emptylist--ForceBlocks] >> ReadSessionImplTest::DecompressRaw >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-generic/aggregations_mixed_leadlag--ForceBlocks] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> test.py::test[pg-tpcds-q46-default.txt-ForceBlocks] [GOOD] >> test.py::test[view-secure--Results] [GOOD] >> test.py::test[view-trivial_view_concat--Results] >> test.py::test[pg-tpcds-q46-default.txt-Results] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> test.py::test[limit-limit--Results] [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest >> test.py::test[blocks-filter_by_column_with_drop--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--Results] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit >> test.py::test[lineage-select_field-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_many-default.txt-Results] [SKIPPED] >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[join-pullup_extend--ForceBlocks] [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> test.py::test[window-win_func_rank_by_part--ForceBlocks] [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> test.py::test[window-win_func_rank_by_part--Results] >> test.py::test[join-pullup_extend--Results] >> test.py::test[join-yql-14829_left--ForceBlocks] [GOOD] >> test.py::test[join-yql-14829_left--Results] 
|55.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |55.2%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> test.py::test[aggregate-percentiles_grouped_expr--ForceBlocks] [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> test.py::test[select-hits_count--Results] [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] |55.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |55.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut >> test.py::test[aggregate-percentiles_grouped_expr--Results] |55.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |55.2%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-11-03T14:02:59.308702Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.308711Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.308715Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.326708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.337969Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.341608Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.346744Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:59.351020Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.351028Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.351032Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.358705Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.370713Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.374688Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.374859Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:59.374970Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-11-03T14:02:59.375323Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.375328Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.375333Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.395528Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.406699Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.406786Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.410710Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:59.410918Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.414724Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:59.418686Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.418714Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-11-03T14:02:59.423064Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.423072Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.423076Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.438708Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.442704Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.466713Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.470722Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-11-03T14:02:59.567073Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.567128Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-11-03T14:02:59.567211Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-11-03T14:02:59.567248Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-11-03T14:02:59.567303Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.567309Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:02:59.567318Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.567356Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.567364Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-11-03T14:02:59.567368Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-11-03T14:02:59.567372Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:02:59.567394Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.567413Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-11-03T14:02:59.567417Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-11-03T14:02:59.567419Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.567433Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.567439Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-11-03T14:02:59.567448Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-11-03T14:02:59.567452Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:02:59.567469Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-11-03T14:02:59.567935Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.567941Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.567945Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.578682Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.590721Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.590804Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.594711Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-11-03T14:02:59.594942Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.594976Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-11-03T14:02:59.598720Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-11-03T14:02:59.598750Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-11-03T14:02:59.600462Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.600476Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:02:59.600482Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-11-03T14:02:59.600486Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-11-03T14:02:59.600498Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:02:59.600565Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.600600Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-11-03T14:02:59.600603Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-11-03T14:02:59.600607Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-11-03T14:02:59.600611Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-11-03T14:02:59.600617Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:02:59.600640Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 2025-11-03T14:02:59.605908Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.605915Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.605920Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.610699Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.615504Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.615587Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.618707Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. 
Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:59.625881Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.625943Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.626035Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-11-03T14:02:59.626058Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-11-03T14:02:59.632292Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.632312Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:02:59.632319Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-11-03T14:02:59.632324Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-11-03T14:02:59.632337Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-11-03T14:02:59.632346Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-11-03T14:02:59.632391Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-11-03T14:02:59.632451Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-11-03T14:02:59.788463Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} >> test.py::test[blocks-decimal_multiplicative_ops--Results] [GOOD] >> test.py::test[blocks-interval_div--Results] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |55.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> test.py::test[schema-copy-schema-Results] [GOOD] |55.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector >> test.py::test[aggregate-group_by_expr_mul_col--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-11-03T14:02:59.128792Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.128802Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.128807Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.128928Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.131392Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.133231Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.133366Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:59.133695Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.133738Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.133818Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (2-2) 2025-11-03T14:02:59.133826Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:59.133871Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.133877Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-11-03T14:02:59.133887Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-11-03T14:02:59.133892Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-11-03T14:02:59.134423Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.134429Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.134434Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.150707Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.162735Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.198711Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.198892Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-11-03T14:02:59.230929Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.230992Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-11-03T14:02:59.232615Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-11-03T14:02:59.232641Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-11-03T14:02:59.234679Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.234697Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:02:59.234712Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.234773Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.234785Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-11-03T14:02:59.234789Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-11-03T14:02:59.234793Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:02:59.234823Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.234862Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-11-03T14:02:59.234866Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-11-03T14:02:59.234870Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.234884Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.234890Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-11-03T14:02:59.234896Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-11-03T14:02:59.234901Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:02:59.234922Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). 
Partition stream id: 1 2025-11-03T14:02:59.251090Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.251100Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.251105Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:59.251204Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:59.252821Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:59.252904Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:59.255564Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-11-03T14:02:59.278013Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:02:59.278061Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-11-03T14:02:59.283053Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-11-03T14:02:59.283102Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-11-03T14:02:59.283153Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:59.283171Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.283213Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.283225Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:02:59.283228Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.283238Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.283244Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-11-03T14:02:59.283247Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-11-03T14:02:59.283273Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-11-03T14:02:59.283279Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-11-03T14:02:59.283282Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:03:00.363438Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-11-03T14:03:00.403033Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-11-03T14:03:00.403042Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-11-03T14:03:00.403045Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:03:00.403139Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:03:00.403276Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:03:00.403356Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-11-03T14:03:00.403443Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-11-03T14:03:00.441346Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-11-03T14:03:00.441573Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:03:00.441819Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-11-03T14:03:00.442163Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-11-03T14:03:00.442312Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-11-03T14:03:00.442948Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-11-03T14:03:00.443098Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-11-03T14:03:00.443238Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-11-03T14:03:00.443378Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-11-03T14:03:00.444552Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-11-03T14:03:00.444694Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-11-03T14:03:00.444721Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-11-03T14:03:00.444801Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-11-03T14:03:00.446533Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). 
Partition stream id: 1 2025-11-03T14:03:00.446992Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.446997Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.447000Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:03:00.447081Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:03:00.447163Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:03:00.447205Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.447256Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:03:00.447315Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-11-03T14:03:00.447550Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.447554Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.447558Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:03:00.447608Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:03:00.447682Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:03:00.447706Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.447838Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:00.447874Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:03:00.447898Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:03:00.447905Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-11-03T14:03:00.447942Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 |55.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> test.py::test[join-bush_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in-off-Results] [SKIPPED] >> test.py::test[insert-trivial_literals-default.txt-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] >> test.py::test[join-count_bans-off-ForceBlocks] >> test.py::test[blocks-filter_by_column_with_drop--Results] [GOOD] >> test.py::test[blocks-filter_direct_col--ForceBlocks] >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |55.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[lineage-with_inline-default.txt-Results] [SKIPPED] >> test.py::test[pg-tpcds-q09-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] >> TDatabaseResolverTests::DataStreams_Dedicated >> test.py::test[produce-process_with_lambda_outstream-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_lambda--Results] [SKIPPED] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> test.py::test[optimizers-unused_columns_window--ForceBlocks] [GOOD] >> test.py::test[optimizers-unused_columns_window--Results] >> test.py::test[pg-tpch-q07-default.txt-Results] [GOOD] >> test.py::test[datetime-date_tz_table_sort_asc--Results] [GOOD] >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] >> test.py::test[blocks-add_uint64_opt--Results] [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] >> test.py::test[pg-tpcds-q46-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-ForceBlocks] >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[pg-tpch-q10-default.txt-Results] >> test.py::test[blocks-combine_all_sum_filter--Results] [GOOD] >> test.py::test[blocks-combine_hashed_pg--Results] |55.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> test.py::test[action-subquery_merge1-default.txt-Results] [GOOD] >> test.py::test[action-subquery_merge2-default.txt-Results] >> test.py::test[window-win_func_rank_by_part--Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-ForceBlocks] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test.py::test[window-yql-14738-default.txt-ForceBlocks] >> test.py::test[aggr_factory-top_by-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggr_factory-top_by-default.txt-Results] >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] >> test.py::test[pg-tpcds-q10-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q27-default.txt-Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename--Results] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] >> test.py::test[in-in_types_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 
2025-11-03T14:03:02.288045Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> test.py::test[join-bush_dis_in_in_in--Results] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-ForceBlocks] >> test.py::test[join-pullup_extend--Results] [GOOD] >> test.py::test[join-selfjoin_on_sorted--ForceBlocks] >> test.py::test[aggr_factory-avg-default.txt-Results] [GOOD] >> test.py::test[aggregate-GroupByOneField--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-11-03T14:03:02.886088Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. >> test.py::test[order_by-order_by_num_key_and_subkey_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |55.3%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> TDatabaseResolverTests::ClickHouseNative |55.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |55.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-11-03T14:03:02.938254Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. 
|55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |55.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[join-pullup_null_column--Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[insert-yql-20257-default.txt-Results] >> TDatabaseResolverTests::MySQL |55.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part10/pytest >> test.py::test[schema-copy-schema-Results] [GOOD] |55.3%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> test.py::test[expr-as_table_emptylist--ForceBlocks] [GOOD] >> test.py::test[expr-as_table_emptylist--Results] >> test.py::test[file-where_key_in_file_content_typed--Results] [GOOD] >> test.py::test[flatten_by-flatten_dict--Results] >> test.py::test[blocks-add_uint8--ForceBlocks] [GOOD] |55.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q09-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] >> test.py::test[tpch-q4-default.txt-ForceBlocks] [GOOD] >> test.py::test[tpch-q4-default.txt-Results] >> test.py::test[pg-join_using_tables3-default.txt-Results] [GOOD] >> test.py::test[pg-select_alias_partial-default.txt-Results] >> test.py::test[view-trivial_view_concat--Results] [GOOD] >> test.py::test[weak_field-weak_field_esc_string--Results] >> test.py::test[blocks-add_uint8--Results] |55.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[pg-select_common_type_unionall--Results] [GOOD] |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |55.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |55.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-11-03T14:03:03.893730Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
>> test.py::test[optimizers-unused_columns_window--Results] [GOOD] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-14279_keyextract_with_world_dep--Results] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--ForceBlocks] [SKIPPED] >> test.py::test[optimizers-yql-17715_concat_sort_desc--Results] [SKIPPED] >> test.py::test[optimizers-yql-20264-default.txt-ForceBlocks] >> test.py::test[aggregate-percentiles_grouped_expr--Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[schema-limit_simple--ForceBlocks] >> test.py::test[bigdate-implicit_cast_callable-default.txt-ForceBlocks] |55.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |55.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> test.py::test[select-literal_bool-default.txt-Results] [GOOD] >> test.py::test[select-missing_with_nonpersist--Results] [SKIPPED] >> test.py::test[select-trivial_group_by-default.txt-Results] >> test.py::test[limit-dynamic_limit_offset_overflow-default.txt-Results] [GOOD] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] >> test.py::test[limit-yql-8046_empty_sorted_desc--Results] [SKIPPED] >> test.py::test[limit-zero_limit-default.txt-Results] |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> test.py::test[window-full/syscolumns--Results] [GOOD] >> test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder >> test.py::test[order_by-order_by_num_key_and_subkey_desc--Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-ForceBlocks] >> test.py::test[window-full/syscolumns--ForceBlocks] [GOOD] >> AsyncIndexChangeCollector::UpsertSingleRow >> test.py::test[window-full/syscolumns--Results] >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-11-03T14:03:04.693004Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout |55.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup >> CdcStreamChangeCollector::InsertSingleRow |55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |55.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] >> 
Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> AsyncIndexChangeCollector::InsertSingleRow >> test.py::test[aggr_factory-top_by-default.txt-Results] [GOOD] >> test.py::test[blocks-add_uint8--Results] [GOOD] >> test.py::test[blocks-coalesce_bools--ForceBlocks] >> AsyncIndexChangeCollector::UpsertToSameKey >> test.py::test[join-yql-14829_left--Results] [GOOD] >> test.py::test[join-yql_465--ForceBlocks] >> test.py::test[expr-as_table_emptylist--Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-ForceBlocks] >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> test.py::test[hor_join-table_record--ForceBlocks] |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> test.py::test[blocks-filter_direct_col--ForceBlocks] [GOOD] >> test.py::test[blocks-filter_direct_col--Results] >> test.py::test[action-subquery_merge2-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-hll-default.txt-Results] >> AsyncIndexChangeCollector::DeleteNothing >> test.py::test[tpch-q4-default.txt-Results] [GOOD] >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> test.py::test[blocks-interval_div--Results] [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] >> test.py::test[pg-tpcds-q55-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q55-default.txt-Results] >> TStreamingQueryTestReboots::SimpleDropStreamingQuery [GOOD] |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> test.py::test[pg-tpcds-q27-default.txt-Results] [GOOD] >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest |55.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |55.4%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> test.py::test[pg-tpcds-q41-default.txt-Results] >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows |55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> TStreamingQueryTestReboots::SimpleDropStreamingQuery [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:115:2146] 2025-11-03T14:02:55.260200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:02:55.260234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:02:55.260240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:02:55.260249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:02:55.260256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:02:55.260260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:02:55.260293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:02:55.260306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:02:55.260436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:02:55.260513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:02:55.280038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-11-03T14:02:55.280086Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:02:55.280191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:180:2058] recipient: [1:15:2062] 2025-11-03T14:02:55.283197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:02:55.284277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:02:55.284340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:02:55.286025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:02:55.286079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:02:55.286213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:02:55.286287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:02:55.286913Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:02:55.286954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:02:55.287223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:02:55.287232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:02:55.287256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:02:55.287264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:02:55.287270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:02:55.287289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-11-03T14:02:55.288645Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-11-03T14:02:55.309584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:02:55.309676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:02:55.309750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:02:55.309758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:02:55.309813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:02:55.309831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:02:55.310875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:02:55.310940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:02:55.311007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:02:55.311018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:02:55.311024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:02:55.311030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:02:55.312060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:02:55.312077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:02:55.312084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:02:55.314062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:02:55.314076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:02:55.314081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:02:55.314087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:02:55.314662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:02:55.315110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:02:55.315158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:02:55.315411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, 
transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:02:55.315439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tabl ... : advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1003 at step: 5000004 2025-11-03T14:03:06.536628Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:06.536652Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 141733922928 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:03:06.536660Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 1003:0, HandleReply TEvOperationPlan: step# 5000004 2025-11-03T14:03:06.536682Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-11-03T14:03:06.536701Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1003:0 128 -> 240 2025-11-03T14:03:06.536730Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:03:06.536738Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-11-03T14:03:06.537046Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-11-03T14:03:06.537145Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 FAKE_COORDINATOR: Erasing txId 1003 2025-11-03T14:03:06.537529Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:03:06.537540Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:03:06.537586Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-11-03T14:03:06.537621Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:06.537628Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [33:211:2212], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-11-03T14:03:06.537634Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: 
TTxPublishToSchemeBoard Send, to populator: [33:211:2212], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-11-03T14:03:06.537683Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-11-03T14:03:06.537692Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-11-03T14:03:06.537709Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-11-03T14:03:06.537714Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-11-03T14:03:06.537720Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1003:0 progress is 1/1 2025-11-03T14:03:06.537725Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-11-03T14:03:06.537730Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: false 2025-11-03T14:03:06.537736Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-11-03T14:03:06.537742Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1003:0 2025-11-03T14:03:06.537747Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1003:0 2025-11-03T14:03:06.537759Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-11-03T14:03:06.537768Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1003, publications: 2, subscribers: 0 2025-11-03T14:03:06.537773Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-11-03T14:03:06.537777Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1003, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-11-03T14:03:06.537874Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-11-03T14:03:06.537891Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-11-03T14:03:06.537897Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1003 2025-11-03T14:03:06.537903Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-11-03T14:03:06.537911Z node 33 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-11-03T14:03:06.537989Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-11-03T14:03:06.537997Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-11-03T14:03:06.538008Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-11-03T14:03:06.538041Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-11-03T14:03:06.538053Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 1003 2025-11-03T14:03:06.538058Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1003 2025-11-03T14:03:06.538064Z node 33 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-11-03T14:03:06.538068Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:03:06.538079Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1003, subscribers: 0 2025-11-03T14:03:06.545019Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-11-03T14:03:06.545083Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-11-03T14:03:06.545101Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-11-03T14:03:06.545177Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-11-03T14:03:06.545186Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-11-03T14:03:06.545274Z node 33 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-11-03T14:03:06.545303Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 
2025-11-03T14:03:06.545309Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [33:356:2346] TestWaitNotification: OK eventTxId 1003 2025-11-03T14:03:06.545711Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/StreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:03:06.545779Z node 33 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/StreamingQuery" took 394us result status StatusPathDoesNotExist 2025-11-03T14:03:06.545822Z node 33 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/StreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/StreamingQuery" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> test.py::test[join-selfjoin_on_sorted--ForceBlocks] [GOOD] >> test.py::test[join-selfjoin_on_sorted--Results] |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> test.py::test[window-generic/aggregations_mixed_leadlag--ForceBlocks] [GOOD] >> test.py::test[join-count_bans-off-ForceBlocks] [GOOD] >> test.py::test[join-count_bans-off-Results] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> test.py::test[join-count_bans-off-Results] [SKIPPED] >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> test.py::test[join-flatten_columns2--ForceBlocks] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> test.py::test[blocks-filter_direct_col--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--ForceBlocks] >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> test.py::test[join-count_bans--Results] [GOOD] >> test.py::test[join-equi_join_three_simple-off-Results] [SKIPPED] >> test.py::test[join-full_trivial_udf_call--Results] >> test.py::test[produce-reduce_multi_in_keytuple_difftype--Results] [GOOD] >> test.py::test[produce-reduce_multi_out--Results] [SKIPPED] >> test.py::test[result_types-containers-default.txt-Results] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> test.py::test[pg-tpcds-q55-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-ForceBlocks] >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} 
ydb/library/table_creator/ut/unittest >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> test.py::test[pg-tpcds-q63-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q63-default.txt-Results] |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> test.py::test[weak_field-weak_field_esc_string--Results] [GOOD] >> test.py::test[window-current/aggregations--Results] >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> test.py::test[aggregate-GroupByOneField--Results] [GOOD] >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] >> test.py::test[pg-select_alias_partial-default.txt-Results] [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> test.py::test[schema-limit_simple--ForceBlocks] [GOOD] >> test.py::test[schema-limit_simple--Results] |55.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/library/table_creator/ut/unittest >> test.py::test[aggregate-group_by_gs_flatten_columns-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_hop_compact--Results] [SKIPPED] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] >> test.py::test[optimizers-yql-20264-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-Results] >> test.py::test[join-bush_dis_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_dis_in_in_in-off-Results] [SKIPPED] >> test.py::test[optimizers-yql-20264-default.txt-Results] >> test.py::test[insert-yql-20257-default.txt-Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup-off-Results] [SKIPPED] >> test.py::test[join-cbo_4tables_only_sorted_merge--Results] [SKIPPED] >> test.py::test[join-extract_or_predicates-default.txt-Results] >> test.py::test[join-emptyjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-emptyjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-inner_with_order--ForceBlocks] >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-join_using_tables1-default.txt-Results] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> test.py::test[join-mergejoin_with_different_key_names_norename-off-ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_different_key_names_norename-off-Results] [SKIPPED] >> test.py::test[join-pullup_exclusion-off-ForceBlocks] >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> test.py::test[limit-zero_limit-default.txt-Results] [GOOD] >> test.py::test[lineage-select_all-default.txt-Results] [SKIPPED] >> DataShardReadTableSnapshots::ReadTableSnapshot >> test.py::test[lineage-select_field_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_join-default.txt-Results] [SKIPPED] >> test.py::test[lineage-select_nested_table_row-default.txt-Results] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> 
test.py::test[lineage-select_nested_table_row-default.txt-Results] [SKIPPED] >> test.py::test[lineage-window_one-default.txt-Results] [SKIPPED] >> test.py::test[optimizers-sort_constraint_in_left--Results] >> test.py::test[blocks-coalesce_bools--ForceBlocks] [GOOD] >> test.py::test[window-full/syscolumns--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-ForceBlocks] >> test.py::test[hor_join-table_record--ForceBlocks] [GOOD] >> test.py::test[hor_join-table_record--Results] >> test.py::test[pg-tpcds-q63-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] >> test.py::test[join-yql_465--ForceBlocks] [GOOD] >> test.py::test[join-yql_465--Results] >> test.py::test[blocks-coalesce_bools--Results] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> test.py::test[distinct-distinct_count_and_full_count-default.txt-Results] [GOOD] >> test.py::test[epochs-use_sorted_by_complex_type--Results] [SKIPPED] >> test.py::test[expr-langver--Results] >> test.py::test[schema-limit_simple--Results] [GOOD] >> test.py::test[schema-select_all_inferschema--ForceBlocks] >> test.py::test[window-yql-14738-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-14738-default.txt-Results] >> test.py::test[select-trivial_group_by-default.txt-Results] [GOOD] >> test.py::test[select_yql-from_table_with_use_cluster-default.txt-Results] [SKIPPED] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> test.py::test[bigdate-implicit_cast_callable-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] >> test.py::test[select_yql-order_by_limit_offset-default.txt-Results] [SKIPPED] >> test.py::test[stream_lookup_join-lookup_join_narrow-default.txt-Results] [SKIPPED] >> test.py::test[table_range-each_with_non_existing--Results] >> IncrementalBackup::SimpleBackup >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn |55.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[join-selfjoin_on_sorted--Results] [GOOD] >> test.py::test[in-in_types_cast_all-default.txt-Results] [GOOD] >> test.py::test[insert-append--ForceBlocks] >> test.py::test[optimizers-yql-20264-default.txt-Results] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--ForceBlocks] >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] [GOOD] |55.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |55.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> test.py::test[blocks-combine_all_pg_filter--Results] [GOOD] >> test.py::test[blocks-date_top_sort--Results] >> test.py::test[window-generic/aggregations_mixed_leadlag--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-11-03T14:03:05.687160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:05.736438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:05.738134Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:05.738190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:05.738215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00283f/r3tmp/tmp9rEvg8/pdisk_1.dat 2025-11-03T14:03:05.803719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:05.803763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:05.815309Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:05.816459Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178585279052 != 1762178585279056 2025-11-03T14:03:05.847644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:05.896750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:05.940926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:06.021588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:06.041166Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2574] 2025-11-03T14:03:06.041240Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.049183Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.049234Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.049388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:06.049396Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:06.049401Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:06.049455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.049522Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:689:2577] 2025-11-03T14:03:06.049549Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.050555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.050570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:713:2574] in generation 1 2025-11-03T14:03:06.050707Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.050721Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.050808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:06.050814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:06.050818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:06.050843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.050854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.050859Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2577] in generation 1 2025-11-03T14:03:06.061112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.065032Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:06.065108Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.065133Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-11-03T14:03:06.065138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.065143Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:06.065149Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.065265Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.065275Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:06.065287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.065297Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-11-03T14:03:06.065301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:06.065305Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:06.065310Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-11-03T14:03:06.065421Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:06.065452Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:06.065494Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.065503Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.065514Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:06.065520Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.065528Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-11-03T14:03:06.065540Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-11-03T14:03:06.065659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2570], serverId# [1:687:2575], sessionId# [0:0:0] 2025-11-03T14:03:06.065671Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-11-03T14:03:06.065676Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.065681Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-11-03T14:03:06.065687Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:06.065731Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:06.065806Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:06.065828Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:06.065924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2571], serverId# [1:691:2578], sessionId# [0:0:0] 2025-11-03T14:03:06.065970Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-11-03T14:03:06.066000Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-11-03T14:03:06.066013Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-11-03T14:03:06.068280Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.068307Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:06.080426Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:06.080478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.080660Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-11-03T14:03:06.080673Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.224180Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, cl ... _path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:10.578357Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:10.578367Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-11-03T14:03:10.583336Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:10.583437Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:10.583452Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-11-03T14:03:10.583461Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-11-03T14:03:10.583615Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:10.583676Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:10.583686Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:10.583749Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-11-03T14:03:10.583756Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-11-03T14:03:10.585715Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:836:2685], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.585740Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:847:2690], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.585751Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.585914Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:851:2694], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.585934Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.586784Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:10.587990Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:10.588014Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:10.588028Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-11-03T14:03:10.634846Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:10.732618Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:10.732664Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:10.732679Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-11-03T14:03:10.733302Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:850:2693], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:10.770035Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:924:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:10.796002Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k950esvs482px6tf7qxwn8c6, Database: , SessionId: ydb://session/3?node_id=4&id=ZThjNDVkMmUtZTc4NzU2ZDgtZmY3NDkxNTAtNzUxY2VlNjc=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:10.796826Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1035:2784], serverId# [4:1036:2785], sessionId# [0:0:0] 2025-11-03T14:03:10.796962Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-11-03T14:03:10.797046Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1762178590797010 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:10.797078Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1762178590797010 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:10.797095Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-11-03T14:03:10.807514Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-11-03T14:03:10.807543Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:10.820219Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k950et2r0yxvc9a2wa22yg1f, Database: , SessionId: ydb://session/3?node_id=4&id=YzhhZGY4NjEtODVkN2E4MzUtODVmNTNkMi0yOTI2NjJkYw==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-11-03T14:03:10.820802Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037889 2025-11-03T14:03:10.820871Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1762178590820848 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:10.820893Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 4 Group: 1762178590820848 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:10.820902Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 1762178590820848 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:10.820913Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 6 Group: 1762178590820848 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:10.820924Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-11-03T14:03:10.831273Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-11-03T14:03:10.831298Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:10.832230Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1084:2824], serverId# [4:1085:2825], sessionId# [0:0:0] 2025-11-03T14:03:10.833017Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1086:2826], serverId# [4:1087:2827], sessionId# [0:0:0] >> test.py::test[pg-tpcds-q41-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q53-default.txt-Results] >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> test.py::test[blocks-interval_sub_interval--Results] [GOOD] >> test.py::test[blocks-pg_to_dates--Results] |55.5%| [CC] {default-linux-x86_64, relwithdebinfo} 
$(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-11-03T14:03:06.133982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:06.172558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:06.175096Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:06.175179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:06.175218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00280c/r3tmp/tmpVe9x9C/pdisk_1.dat 2025-11-03T14:03:06.251238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:06.251270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:06.261040Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:06.262036Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178585679881 != 1762178585679885 2025-11-03T14:03:06.293222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:06.341485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:06.384458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:06.458394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:06.473402Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2574] 2025-11-03T14:03:06.473457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.480793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.480841Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.480993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:06.481002Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:06.481007Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:06.481057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.481119Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:689:2577] 2025-11-03T14:03:06.481144Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.482179Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.482196Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:713:2574] in generation 1 2025-11-03T14:03:06.482312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.482325Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.482412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:06.482417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:06.482422Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:06.482447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.482458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.482463Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2577] in generation 1 2025-11-03T14:03:06.492728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.498206Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:06.498291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.498320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-11-03T14:03:06.498326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.498332Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:06.498338Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.498452Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.498462Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:06.498471Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.498480Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-11-03T14:03:06.498484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:06.498488Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:06.498492Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-11-03T14:03:06.498589Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:06.498612Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:06.498669Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.498677Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.498686Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:06.498692Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.498698Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-11-03T14:03:06.498708Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-11-03T14:03:06.498794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2570], serverId# [1:687:2575], sessionId# [0:0:0] 2025-11-03T14:03:06.498802Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-11-03T14:03:06.498806Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.498811Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-11-03T14:03:06.498817Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:06.498851Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:06.498909Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:06.498926Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:06.498998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2571], serverId# [1:691:2578], sessionId# [0:0:0] 2025-11-03T14:03:06.499025Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-11-03T14:03:06.499052Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-11-03T14:03:06.499063Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-11-03T14:03:06.499417Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.499444Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:06.509758Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:06.509798Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.509935Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-11-03T14:03:06.509943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.644275Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, cl ... ASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:11.211555Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.211571Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-11-03T14:03:11.211696Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:03:11.211804Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:11.212147Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-11-03T14:03:11.212162Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-11-03T14:03:11.212239Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:03:11.212316Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:11.212675Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-11-03T14:03:11.212686Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.212788Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-11-03T14:03:11.212800Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.213119Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-11-03T14:03:11.213131Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:11.213202Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-11-03T14:03:11.213213Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:11.213492Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.213506Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:11.213513Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-11-03T14:03:11.213531Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:11.213543Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:11.213556Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.213925Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:11.213939Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:11.213944Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-11-03T14:03:11.213957Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:11.213965Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:11.213976Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:11.214357Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.214381Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:11.215281Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:11.215301Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:11.215487Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:11.215540Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-11-03T14:03:11.215547Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-11-03T14:03:11.215674Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:11.217507Z node 4 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.217529Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:799:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.217540Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.217710Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:803:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.217731Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.218599Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:11.219889Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.219916Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:11.263790Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:11.367117Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.367171Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:11.367937Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:11.401056Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:875:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:11.429519Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k950etfh3x9pn3bsegzmwgv4, Database: , SessionId: ydb://session/3?node_id=4&id=OGYwYTc3M2YtNTc1ZDE3MmItZGI3ZDc4MjYtMTE3ZDAyM2M=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:11.430243Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:950:2734], serverId# [4:951:2735], sessionId# [0:0:0] 2025-11-03T14:03:11.430399Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-11-03T14:03:11.430500Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1762178591430456 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:11.430539Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-11-03T14:03:11.441147Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-11-03T14:03:11.441198Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:11.442759Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:957:2740], serverId# [4:958:2741], sessionId# [0:0:0] 2025-11-03T14:03:11.444003Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:959:2742], serverId# [4:960:2743], sessionId# [0:0:0] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] >> CdcStreamChangeCollector::NewImage [GOOD] >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD |55.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp >> test.py::test[pg-join_using_tables1-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-ForceBlocks] >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> test.py::test[flatten_by-flatten_dict--Results] [GOOD] >> test.py::test[flatten_by-flatten_expr_join--Results] [SKIPPED] >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] >> test.py::test[hor_join-table_record--Results] [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> test.py::test[in-in_enum_single0-default.txt-ForceBlocks] >> 
test.py::test[window-win_func_auto_arg_two_sort-default.txt-Results] [GOOD] >> test.py::test[window-win_func_first_last--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-11-03T14:03:06.194578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:06.236408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:06.238116Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:06.238170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:06.238192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0027ef/r3tmp/tmpQN9dsF/pdisk_1.dat 2025-11-03T14:03:06.292341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:06.292383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:06.305245Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:06.306216Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178585792039 != 1762178585792043 2025-11-03T14:03:06.337386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:06.383928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:06.427021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:06.500748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:06.518600Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2574] 2025-11-03T14:03:06.518680Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.526249Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.526303Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.526441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:06.526449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:06.526455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:06.526504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.526577Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:689:2577] 2025-11-03T14:03:06.526604Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.527616Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.527630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:713:2574] in generation 1 2025-11-03T14:03:06.527743Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.527757Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.527843Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:06.527848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:06.527853Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:06.527881Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.527892Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.527897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2577] in generation 1 2025-11-03T14:03:06.538266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.542994Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:06.543106Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.543141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-11-03T14:03:06.543147Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.543153Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:06.543160Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.543315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.543326Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:06.543339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.543350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-11-03T14:03:06.543355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:06.543359Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:06.543362Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-11-03T14:03:06.543510Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:06.543542Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:06.543586Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.543595Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.543607Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:06.543612Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.543619Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-11-03T14:03:06.543630Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-11-03T14:03:06.543758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2570], serverId# [1:687:2575], sessionId# [0:0:0] 2025-11-03T14:03:06.543767Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-11-03T14:03:06.543771Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.543776Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-11-03T14:03:06.543781Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:06.543822Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:06.543907Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:06.543931Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:06.544029Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2571], serverId# [1:691:2578], sessionId# [0:0:0] 2025-11-03T14:03:06.544069Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-11-03T14:03:06.544098Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-11-03T14:03:06.544111Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-11-03T14:03:06.544490Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.544509Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:06.554967Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:06.555026Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.555220Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-11-03T14:03:06.555231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.693647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, cl ... .cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:11.403061Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:11.403074Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.403521Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:11.403655Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:11.403666Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:11.405868Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:11.405908Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-11-03T14:03:11.405922Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-11-03T14:03:11.405926Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-11-03T14:03:11.406155Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.431269Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:11.491225Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:11.595711Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-11-03T14:03:11.595743Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-11-03T14:03:11.595805Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.595816Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-11-03T14:03:11.595827Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-11-03T14:03:11.595892Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-11-03T14:03:11.595927Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:11.595985Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.596191Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:11.635777Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-11-03T14:03:11.635819Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.635829Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.635844Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.635873Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:11.635890Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-11-03T14:03:11.635907Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.636656Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-11-03T14:03:11.636671Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:11.638270Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.638295Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:892:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.638368Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.638508Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:898:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.638640Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.639621Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:11.641078Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.807793Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.810690Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-11-03T14:03:11.835789Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:11.857934Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k950etwn8pp909q391bj9eym, Database: , SessionId: ydb://session/3?node_id=4&id=NTRhM2QxNjEtYzBmMjRkOTEtZjA1YjMwNmEtODk0MTQwNWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:11.858703Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:985:2772], serverId# [4:986:2773], sessionId# [0:0:0] 2025-11-03T14:03:11.858867Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-11-03T14:03:11.858963Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1762178591858924 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-11-03T14:03:11.859000Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-11-03T14:03:11.869574Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-11-03T14:03:11.869603Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.886331Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k950ev3ycfkj3rsky998w031, Database: , SessionId: ydb://session/3?node_id=4&id=Zjc1ODFjZDYtMWUzYWYzLTJjMzkyNWI4LTI3MjQ3Nzhi, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-11-03T14:03:11.887102Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-11-03T14:03:11.887207Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1762178591887169 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-11-03T14:03:11.887247Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-11-03T14:03:11.897714Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-11-03T14:03:11.897741Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.898487Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1013:2791], serverId# [4:1014:2792], sessionId# [0:0:0] 2025-11-03T14:03:11.899538Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1015:2793], serverId# [4:1016:2794], sessionId# [0:0:0] >> IncrementalBackup::MultiBackup >> test.py::test[join-yql_465--Results] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-ForceBlocks] >> test.py::test[blocks-coalesce_bools--Results] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] >> test.py::test[pg-tpch-q10-default.txt-Results] [GOOD] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges |55.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[type_v3-append_diff_layout1--ForceBlocks] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-11-03T14:03:06.087794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:06.123902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:06.126227Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:06.126307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:06.126342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002825/r3tmp/tmpFIwDkJ/pdisk_1.dat 2025-11-03T14:03:06.198468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:06.198518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:06.213287Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:06.214502Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178585631247 != 1762178585631251 2025-11-03T14:03:06.246711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:06.294861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:06.328161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:06.412839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:06.430118Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:06.430199Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.448842Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.448896Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.449100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:06.449111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:06.449120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:06.449192Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.449226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.449242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:06.459624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.465011Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:06.465100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.465128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:06.465135Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.465142Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:06.465149Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.465355Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:06.465384Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:06.465497Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.465506Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.465517Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:06.465523Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.465538Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:06.465571Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:06.465636Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:06.465658Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:06.466096Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.478901Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:06.478955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.618066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:708:2586], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-11-03T14:03:06.619348Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-11-03T14:03:06.619384Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.619640Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.619654Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-11-03T14:03:06.619668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-11-03T14:03:06.619755Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-11-03T14:03:06.619796Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:06.619875Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.619891Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-11-03T14:03:06.620362Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:03:06.620479Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.620977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-11-03T14:03:06.621003Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.621294Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-11-03T14:03:06.621311Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.621537Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.621665Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.621676Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.621682Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-11-03T14:03:06.621706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:06.621720Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:06.621734Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.622941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:06.623105Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:06.623117Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72 ... :810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:11.668811Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:11.668824Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.669280Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:11.669418Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:11.669429Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:11.671434Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:11.671470Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-11-03T14:03:11.671483Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-11-03T14:03:11.671487Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-11-03T14:03:11.671727Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:11.694227Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:11.749096Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:11.855423Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-11-03T14:03:11.855454Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-11-03T14:03:11.855514Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.855523Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-11-03T14:03:11.855533Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-11-03T14:03:11.855588Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-11-03T14:03:11.855617Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:11.855675Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.855855Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:11.895136Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-11-03T14:03:11.895176Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.895183Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.895195Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.895219Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:11.895235Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-11-03T14:03:11.895252Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.895974Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-11-03T14:03:11.895993Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:11.897505Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.897528Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:892:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.897590Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.897726Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:898:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.897748Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:11.898588Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:11.900689Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:12.061297Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:12.061935Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-11-03T14:03:12.085109Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:12.109246Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k950ev4sa2pbxyfz2r2y45dy, Database: , SessionId: ydb://session/3?node_id=4&id=NTA5MzJjMmYtZDZiOWRiNzEtOGY0NDg3ODQtYzZhNmUxODY=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:12.109916Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:991:2777], serverId# [4:992:2778], sessionId# [0:0:0] 2025-11-03T14:03:12.110054Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-11-03T14:03:12.110134Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1762178592110098 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-11-03T14:03:12.110167Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-11-03T14:03:12.122938Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-11-03T14:03:12.122967Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:12.138185Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k950evbw1h9wx3eccv1v2qv5, Database: , SessionId: ydb://session/3?node_id=4&id=NWYxNWNkMmYtY2RmMWJhNy04YzE0OWZiYS1hYzZiNWViMA==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-11-03T14:03:12.138890Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-11-03T14:03:12.138980Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1762178592138948 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-11-03T14:03:12.139016Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=1 2025-11-03T14:03:12.149461Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-11-03T14:03:12.149493Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:12.150195Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1019:2796], serverId# [4:1020:2797], sessionId# [0:0:0] 2025-11-03T14:03:12.151342Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1021:2798], serverId# [4:1022:2799], sessionId# [0:0:0] >> IncrementalBackup::BackupRestore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-11-03T14:03:06.871265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:06.908164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:06.911258Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:06.911349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:06.911393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0027e0/r3tmp/tmpKyUZXV/pdisk_1.dat 2025-11-03T14:03:06.981704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:06.981749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:06.993757Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:06.994695Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178586328749 != 1762178586328753 2025-11-03T14:03:07.025923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:07.079015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:07.112114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:07.203316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:07.221483Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:686:2574] 2025-11-03T14:03:07.221560Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:07.231377Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:07.231477Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:07.231630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:07.231638Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:07.231643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:07.231691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:07.231764Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:689:2577] 2025-11-03T14:03:07.231793Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:07.232782Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:07.232799Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:713:2574] in generation 1 2025-11-03T14:03:07.232939Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:07.232955Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:07.233092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:07.233103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:07.233111Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:07.233151Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:07.233166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:07.233174Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:718:2577] in generation 1 2025-11-03T14:03:07.243516Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:07.248522Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:07.248617Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:07.248647Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:721:2595] 2025-11-03T14:03:07.248654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:07.248660Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:07.248666Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:07.248792Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:07.248802Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:07.248814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:07.248824Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:722:2596] 2025-11-03T14:03:07.248828Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:07.248832Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:07.248836Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 
2025-11-03T14:03:07.248936Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:07.248960Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:07.248998Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:07.249005Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:07.249014Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:07.249019Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:07.249025Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-11-03T14:03:07.249035Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-11-03T14:03:07.249125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2570], serverId# [1:687:2575], sessionId# [0:0:0] 2025-11-03T14:03:07.249134Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-11-03T14:03:07.249138Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:07.249143Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2025-11-03T14:03:07.249149Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:07.249186Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:07.249244Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:07.249265Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:07.249348Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2571], serverId# [1:691:2578], sessionId# [0:0:0] 2025-11-03T14:03:07.249383Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2025-11-03T14:03:07.249409Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-11-03T14:03:07.249420Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-11-03T14:03:07.249718Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:07.249730Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 
1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:07.260059Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:07.260114Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:07.260312Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2025-11-03T14:03:07.260326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:07.394873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, cl ... nStepAccepted TabletId# 72075186224037888 step# 1000} 2025-11-03T14:03:12.183338Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:12.183817Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-11-03T14:03:12.183834Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:12.183912Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-11-03T14:03:12.183921Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:12.184275Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:12.184292Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:12.184300Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-11-03T14:03:12.184323Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:12.184339Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:12.184355Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:12.184778Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:12.184793Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:12.184798Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2025-11-03T14:03:12.184813Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:12.184821Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:12.184831Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2025-11-03T14:03:12.185306Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:12.185329Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:12.185943Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:12.185961Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:12.186134Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:12.186190Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-11-03T14:03:12.186197Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-11-03T14:03:12.186329Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:12.188280Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:12.188298Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:799:2653], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:12.188306Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:12.188442Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:803:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:12.188458Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:12.189232Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:12.190549Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:12.190579Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:12.234203Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:12.328645Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:12.328683Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-11-03T14:03:12.329220Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2656], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:12.361192Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:875:2698] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:12.377048Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k950evdve4tyvxqvfew5ftm3, Database: , SessionId: ydb://session/3?node_id=4&id=MWE5MDMyZGUtN2VlZjViYTYtYmUzNjQyNi1lZGUzMjJjNA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:12.377880Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:944:2729], serverId# [4:945:2730], sessionId# [0:0:0] 2025-11-03T14:03:12.378058Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037889 2025-11-03T14:03:12.378166Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1762178592378121 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:12.378209Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037889, row count=1 2025-11-03T14:03:12.388758Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-11-03T14:03:12.388792Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:12.403537Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k950evm691kx7pydg3sc2xfe, Database: , SessionId: ydb://session/3?node_id=4&id=MjkzNTgxNGEtNjBmYjkyYWUtYjRhODQ4YTMtYTRiMTFkYzU=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-11-03T14:03:12.404313Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037889 2025-11-03T14:03:12.404425Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1762178592404384 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:12.404464Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1762178592404384 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-11-03T14:03:12.404485Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037889, row count=1 2025-11-03T14:03:12.415027Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-11-03T14:03:12.415062Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:12.416466Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:985:2761], serverId# [4:986:2762], sessionId# [0:0:0] 2025-11-03T14:03:12.417684Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:987:2763], serverId# [4:988:2764], sessionId# [0:0:0] >> test.py::test[result_types-containers-default.txt-Results] [GOOD] >> test.py::test[sampling-sort-default.txt-Results] >> IncrementalBackup::MultiShardIncrementalRestore >> test.py::test[join-flatten_columns2--ForceBlocks] [GOOD] >> test.py::test[join-flatten_columns2--Results] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> test.py::test[pg-select_subquery-default.txt-Results] [GOOD] >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::SimpleRestore >> test.py::test[pg-select_subquery2-default.txt-Results] >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-11-03T14:03:11.001780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:11.044921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:11.048527Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:11.048645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:11.048699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00325d/r3tmp/tmpLeb4CG/pdisk_1.dat 2025-11-03T14:03:11.131552Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:11.131609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:11.152532Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:11.153618Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178590540429 != 1762178590540433 2025-11-03T14:03:11.190327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:11.243669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:11.283343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:11.373867Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:11.373904Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:11.373940Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:11.396167Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:11.396228Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:11.396499Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:11.396520Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:11.396588Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:11.396639Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:11.396657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:11.396743Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:11.397295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:11.397693Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:11.397713Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:11.414548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:11.415697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:11.415802Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:11.415873Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:11.425688Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:11.425951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:11.425989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:11.426193Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:11.426206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:11.426214Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:11.426302Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:11.426341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:11.426360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:11.436783Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:11.454869Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:11.454976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:11.455003Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:11.455009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:11.455014Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:11.455021Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.455111Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:11.455119Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:11.455237Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:11.455263Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:11.455394Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.455403Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:11.455411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:11.455417Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:11.455422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:11.455427Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:11.455434Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.455451Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.455460Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.455467Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:11.455481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:11.455485Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:11.455507Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:11.455562Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:11.455574Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:11.455597Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:11.455606Z node ... : NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976710662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-11-03T14:03:13.889666Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1700: [ReadTable [2:866:2683] TxId# 281474976710661] Received stream data from ShardId# 72075186224037890 2025-11-03T14:03:13.889672Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1711: [ReadTable [2:866:2683] TxId# 281474976710661] Sending TEvStreamDataAck to [2:1001:2789] ShardId# 72075186224037890 2025-11-03T14:03:13.889696Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287428, Sender [2:1001:2789], Recipient [2:866:2683]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976710662 ShardId: 72075186224037890 2025-11-03T14:03:13.889701Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2069: [ReadTable [2:866:2683] TxId# 281474976710661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-11-03T14:03:13.889710Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976710662, PendingAcks: 0 2025-11-03T14:03:13.889775Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287941, Sender [2:865:2683], Recipient [2:866:2683]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976710661 MessageSizeLimit: 1 ReservedMessages: 1 2025-11-03T14:03:13.889782Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2097: [ReadTable [2:866:2683] TxId# 281474976710661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-11-03T14:03:13.889789Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2160: [ReadTable [2:866:2683] TxId# 281474976710661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-11-03T14:03:13.889796Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976710662, MessageQuota: 1 2025-11-03T14:03:13.889806Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976710662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-11-03T14:03:13.889824Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269550080, Sender [2:1001:2789], Recipient [2:866:2683]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976710662 TxResult: 
"\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-11-03T14:03:13.889829Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1700: [ReadTable [2:866:2683] TxId# 281474976710661] Received stream data from ShardId# 72075186224037890 2025-11-03T14:03:13.889834Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1711: [ReadTable [2:866:2683] TxId# 281474976710661] Sending TEvStreamDataAck to [2:1001:2789] ShardId# 72075186224037890 2025-11-03T14:03:13.889842Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976710662, PendingAcks: 0 2025-11-03T14:03:13.889853Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287428, Sender [2:1001:2789], Recipient [2:866:2683]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976710662 ShardId: 72075186224037890 2025-11-03T14:03:13.889857Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2069: [ReadTable [2:866:2683] TxId# 281474976710661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-11-03T14:03:13.889898Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287941, Sender [2:865:2683], Recipient [2:866:2683]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976710661 MessageSizeLimit: 1 ReservedMessages: 1 2025-11-03T14:03:13.889904Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2097: [ReadTable [2:866:2683] TxId# 281474976710661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-11-03T14:03:13.889908Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2160: [ReadTable [2:866:2683] TxId# 281474976710661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-11-03T14:03:13.889915Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976710662, MessageQuota: 1 2025-11-03T14:03:13.889924Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976710662, MessageQuota: 1 2025-11-03T14:03:13.889953Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287429, Sender [2:1001:2789], Recipient [2:866:2683]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976710662 ShardId: 72075186224037890 2025-11-03T14:03:13.889958Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2120: [ReadTable [2:866:2683] TxId# 281474976710661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-11-03T14:03:13.889964Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2188: [ReadTable [2:866:2683] TxId# 281474976710661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-11-03T14:03:13.889974Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-11-03T14:03:13.889980Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710662, at: 72075186224037890 2025-11-03T14:03:13.890005Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:906:2714], Recipient [2:906:2714]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:13.890010Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:13.890018Z node 2 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-11-03T14:03:13.890023Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:03:13.890029Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710662] at 72075186224037890 for ReadTableScan 2025-11-03T14:03:13.890035Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710662] at 72075186224037890 on unit ReadTableScan 2025-11-03T14:03:13.890042Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976710662] at 72075186224037890 error: , IsFatalError: 0 2025-11-03T14:03:13.890047Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710662] at 72075186224037890 is Executed 2025-11-03T14:03:13.890052Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710662] at 72075186224037890 executing on unit ReadTableScan 2025-11-03T14:03:13.890056Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710662] at 72075186224037890 to execution unit FinishPropose 2025-11-03T14:03:13.890061Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710662] at 72075186224037890 on unit FinishPropose 2025-11-03T14:03:13.890069Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710662] at 72075186224037890 is DelayComplete 2025-11-03T14:03:13.890074Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710662] at 72075186224037890 executing on unit FinishPropose 2025-11-03T14:03:13.890078Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710662] at 72075186224037890 to execution unit CompletedOperations 2025-11-03T14:03:13.890082Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710662] at 72075186224037890 on unit CompletedOperations 2025-11-03T14:03:13.890091Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710662] at 72075186224037890 is Executed 2025-11-03T14:03:13.890096Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710662] at 72075186224037890 executing on unit CompletedOperations 2025-11-03T14:03:13.890101Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:281474976710662] at 72075186224037890 has finished 2025-11-03T14:03:13.890105Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:13.890110Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-11-03T14:03:13.890114Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-11-03T14:03:13.890118Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-11-03T14:03:13.890127Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:13.890132Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976710662] at 72075186224037890 on unit 
FinishPropose 2025-11-03T14:03:13.890141Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976710662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-11-03T14:03:13.890154Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-11-03T14:03:13.890207Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269550080, Sender [2:906:2714], Recipient [2:866:2683]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976710662 Step: 0 OrderId: 281474976710662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 65 } } CommitVersion { Step: 0 TxId: 281474976710662 } 2025-11-03T14:03:13.890214Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1850: [ReadTable [2:866:2683] TxId# 281474976710661] Received stream complete from ShardId# 72075186224037890 2025-11-03T14:03:13.890226Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2933: [ReadTable [2:866:2683] TxId# 281474976710661] RESPONSE Status# ExecComplete prepare time: 0.012794s execute time: 0.204454s total time: 0.217248s 2025-11-03T14:03:13.890312Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553190, Sender [2:866:2683], Recipient [2:675:2566]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976710661 2025-11-03T14:03:13.890363Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553190, Sender [2:866:2683], Recipient [2:902:2712]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976710661 2025-11-03T14:03:13.890427Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553190, Sender [2:866:2683], Recipient [2:906:2714]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976710661 >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> test.py::test[expr-langver--Results] [GOOD] >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> test.py::test[blocks-combine_hashed_pg--Results] [GOOD] >> test.py::test[blocks-combine_hashed_sum--Results] >> test.py::test[expr-non_persistable_insert_into_fail--Results] |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> test.py::test[join-full_trivial_udf_call--Results] [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> test.py::test[join-grace_join2--Results] [SKIPPED] >> 
test.py::test[join-inner_all_right-off-Results] [SKIPPED] >> test.py::test[join-inner_on_key_only-off-Results] [SKIPPED] >> test.py::test[join-inner_trivial--Results] >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-11-03T14:03:11.428221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:11.467426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:11.469692Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:11.469762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:11.469794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00324b/r3tmp/tmpnyusyi/pdisk_1.dat 2025-11-03T14:03:11.539508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:11.539553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:11.552610Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:11.553606Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178590961931 != 1762178590961935 2025-11-03T14:03:11.587189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:11.637907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:11.671154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:11.754766Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:11.754796Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:11.754824Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:11.773686Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:11.773729Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:11.773949Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:11.773968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:11.774042Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:11.774091Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:11.774109Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:11.774188Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:11.774786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:11.775106Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:11.775124Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:11.789745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:11.789982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:11.790048Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:11.790101Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:11.800001Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:11.800187Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:11.800215Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:11.800393Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:11.800402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:11.800411Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:11.800471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:11.800493Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:11.800505Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:11.810844Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:11.821207Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:11.821296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:11.821324Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:11.821330Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:11.821335Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:11.821341Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:11.821412Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:11.821421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:11.821500Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:11.821518Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:11.821591Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:11.821597Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:11.821601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:11.821605Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:11.821608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:11.821611Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:11.821615Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:11.821625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.821628Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.821633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:11.821641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:11.821646Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:11.821661Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:11.821699Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:11.821706Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:11.821719Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:11.821728Z node ... 2025-11-03T14:03:14.354060Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:14.354137Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435082, Sender [2:1000:2789], Recipient [2:891:2701]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-11-03T14:03:14.354143Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-11-03T14:03:14.354155Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287428, Sender [2:1000:2789], Recipient [2:974:2765]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976710663 ShardId: 72075186224037890 2025-11-03T14:03:14.354160Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2069: [ReadTable [2:974:2765] TxId# 281474976710662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-11-03T14:03:14.354166Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2160: [ReadTable [2:974:2765] TxId# 281474976710662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 2 rows at [2:1000:2789] 2025-11-03T14:03:14.354189Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976710663, MessageQuota: 1 2025-11-03T14:03:14.354245Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976710663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-11-03T14:03:14.354272Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269550080, Sender [2:1000:2789], Recipient [2:974:2765]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976710663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-11-03T14:03:14.354280Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1700: [ReadTable [2:974:2765] TxId# 281474976710662] Received stream data from ShardId# 72075186224037890 2025-11-03T14:03:14.354285Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1711: [ReadTable [2:974:2765] TxId# 281474976710662] Sending TEvStreamDataAck to [2:1000:2789] ShardId# 72075186224037890 2025-11-03T14:03:14.354297Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287428, Sender [2:1000:2789], Recipient [2:974:2765]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976710663 ShardId: 72075186224037890 2025-11-03T14:03:14.354302Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2069: [ReadTable [2:974:2765] TxId# 281474976710662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-11-03T14:03:14.354308Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976710663, PendingAcks: 0 2025-11-03T14:03:14.354359Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269287941, Sender [2:973:2765], Recipient [2:974:2765]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976710662 MessageSizeLimit: 1 ReservedMessages: 1 2025-11-03T14:03:14.354365Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2097: [ReadTable [2:974:2765] TxId# 281474976710662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-11-03T14:03:14.354369Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2160: [ReadTable [2:974:2765] TxId# 281474976710662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1000:2789] 2025-11-03T14:03:14.354382Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976710663, MessageQuota: 1 2025-11-03T14:03:14.354392Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976710663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-11-03T14:03:14.354411Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1232: StateReadTable, received event# 269550080, Sender [2:1000:2789], Recipient [2:974:2765]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976710663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-11-03T14:03:14.354415Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1700: [ReadTable [2:974:2765] TxId# 281474976710662] Received stream data from ShardId# 72075186224037890 2025-11-03T14:03:14.354419Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1711: [ReadTable [2:974:2765] TxId# 281474976710662] Sending TEvStreamDataAck to [2:1000:2789] ShardId# 72075186224037890 2025-11-03T14:03:14.354433Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2933: [ReadTable [2:974:2765] TxId# 281474976710662] RESPONSE Status# ExecComplete prepare time: 0.012250s execute time: 0.152208s total time: 0.164458s 2025-11-03T14:03:14.354476Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976710663, PendingAcks: 0 2025-11-03T14:03:14.354483Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976710663, MessageQuota: 0 2025-11-03T14:03:14.354550Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553190, Sender [2:974:2765], Recipient [2:888:2699]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976710662 2025-11-03T14:03:14.354612Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-11-03T14:03:14.354618Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976710663, at: 72075186224037890 2025-11-03T14:03:14.354673Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:891:2701], Recipient [2:891:2701]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:14.354680Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:14.354688Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-11-03T14:03:14.354693Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:03:14.354698Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710663] at 72075186224037890 for ReadTableScan 2025-11-03T14:03:14.354702Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710663] at 72075186224037890 on unit ReadTableScan 2025-11-03T14:03:14.354708Z node 2 :TX_DATASHARD TRACE: 
read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976710663] at 72075186224037890 error: , IsFatalError: 0 2025-11-03T14:03:14.354714Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710663] at 72075186224037890 is Executed 2025-11-03T14:03:14.354718Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710663] at 72075186224037890 executing on unit ReadTableScan 2025-11-03T14:03:14.354722Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710663] at 72075186224037890 to execution unit FinishPropose 2025-11-03T14:03:14.354727Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710663] at 72075186224037890 on unit FinishPropose 2025-11-03T14:03:14.354733Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710663] at 72075186224037890 is DelayComplete 2025-11-03T14:03:14.354737Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710663] at 72075186224037890 executing on unit FinishPropose 2025-11-03T14:03:14.354741Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710663] at 72075186224037890 to execution unit CompletedOperations 2025-11-03T14:03:14.354745Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710663] at 72075186224037890 on unit CompletedOperations 2025-11-03T14:03:14.354753Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710663] at 72075186224037890 is Executed 2025-11-03T14:03:14.354756Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710663] at 72075186224037890 executing on unit CompletedOperations 2025-11-03T14:03:14.354760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:281474976710663] at 72075186224037890 has finished 2025-11-03T14:03:14.354764Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:14.354768Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-11-03T14:03:14.354772Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-11-03T14:03:14.354778Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-11-03T14:03:14.354787Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:14.354791Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976710663] at 72075186224037890 on unit FinishPropose 2025-11-03T14:03:14.354797Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976710663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-11-03T14:03:14.354810Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-11-03T14:03:14.362879Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549569, Sender [2:974:2765], Recipient [2:891:2701]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976710663 
2025-11-03T14:03:14.362915Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3199: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-11-03T14:03:14.362924Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976710663 2025-11-03T14:03:14.362953Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976710663 2025-11-03T14:03:14.363020Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287431, Sender [2:974:2765], Recipient [2:891:2701]: NKikimrTx.TEvInterruptTransaction TxId: 281474976710663 2025-11-03T14:03:14.363028Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-11-03T14:03:14.363048Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553190, Sender [2:974:2765], Recipient [2:891:2701]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976710662 >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> test.py::test[table_range-each_with_non_existing--Results] [GOOD] |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: 2025-11-03T14:03:06.277148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:06.301478Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:06.303025Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:06.303078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:06.303101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0027eb/r3tmp/tmpUZ39d5/pdisk_1.dat 2025-11-03T14:03:06.380210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:06.380256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:06.389024Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:06.390039Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178585856779 != 1762178585856783 2025-11-03T14:03:06.421188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:06.470014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:06.513694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:06.588050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:06.602400Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:06.602474Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:06.609630Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:06.609678Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:06.609814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:06.609821Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:06.609826Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:06.609872Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:06.609893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:06.609905Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:06.620224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:06.625643Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:06.625714Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:06.625737Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:06.625743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.625749Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:06.625755Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.625901Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:06.625923Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:06.626010Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.626017Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.626026Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:06.626032Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.626046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:06.626067Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:06.626112Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:06.626131Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:06.626435Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.637082Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:06.637128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:06.773159Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:708:2586], serverId# [1:709:2587], sessionId# [0:0:0] 
2025-11-03T14:03:06.774322Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-11-03T14:03:06.774350Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.774542Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.774555Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-11-03T14:03:06.774566Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-11-03T14:03:06.774662Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-11-03T14:03:06.774699Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:06.774779Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:06.774795Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-11-03T14:03:06.775250Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:03:06.775383Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:06.775728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-11-03T14:03:06.775742Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.775886Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-11-03T14:03:06.775901Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.776100Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:06.776227Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:06.776241Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:06.776248Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-11-03T14:03:06.776264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:06.776276Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:06.776289Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:06.777633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:06.777787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:06.777801Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72 ... G: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:14.157024Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:14.157038Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:14.157064Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:14.157082Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-11-03T14:03:14.157099Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:14.157827Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-11-03T14:03:14.157844Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:14.159509Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:882:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:14.159537Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:892:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:14.159608Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:14.159750Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:898:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:14.159770Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:14.160683Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:14.162119Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:14.335289Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:14.336098Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:896:2716], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-11-03T14:03:14.364119Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:954:2755] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:14.398356Z node 4 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k950exbf580zycfrx8q8jwk0, Database: , SessionId: ydb://session/3?node_id=4&id=OTAzOTJiOWQtNzhjOTI3MWEtMjQ0MWRlMTItN2E0NTY4OTU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:14.420267Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:985:2772], serverId# [4:986:2773], sessionId# [0:0:0] 2025-11-03T14:03:14.420610Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-11-03T14:03:14.420743Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1762178594420655 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-11-03T14:03:14.420827Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=1 2025-11-03T14:03:14.435129Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-11-03T14:03:14.435169Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:14.457303Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-11-03T14:03:14.458204Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:14.458275Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2025-11-03T14:03:14.458300Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2025-11-03T14:03:14.458308Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2025-11-03T14:03:14.470975Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:14.591923Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned 
transaction txId 281474976715662 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-11-03T14:03:14.591962Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:14.592007Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:14.592018Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-11-03T14:03:14.592030Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715662] in PlanQueue unit at 72075186224037888 2025-11-03T14:03:14.592138Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715662 keys extracted: 0 2025-11-03T14:03:14.592185Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:14.592272Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:14.592300Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2025-11-03T14:03:14.592525Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1849: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 2500, txId# 281474976715662, at tablet# 72075186224037888 2025-11-03T14:03:14.592577Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 2500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-11-03T14:03:14.592703Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:14.593311Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2025-11-03T14:03:14.593326Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:14.593905Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:14.593925Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 } 2025-11-03T14:03:14.593947Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:14.593971Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-11-03T14:03:14.593987Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { 
Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 3 } 2025-11-03T14:03:14.593993Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:14.594620Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-11-03T14:03:14.602737Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:14.604719Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1028:2810], serverId# [4:1029:2811], sessionId# [0:0:0] 2025-11-03T14:03:14.619177Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1031:2813], serverId# [4:1032:2814], sessionId# [0:0:0] >> test.py::test[bigdate-implicit_cast_callable-default.txt-Results] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-ForceBlocks] >> test.py::test[table_range-range_with_view--Results] >> test.py::test[join-inner_with_order--ForceBlocks] [GOOD] >> test.py::test[join-inner_with_order--Results] >> test.py::test[pg-tpcds-q60-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q60-default.txt-Results] |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |55.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |55.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> test.py::test[blocks-interval_mul_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental >> test.py::test[schema-select_all_inferschema--ForceBlocks] [GOOD] >> test.py::test[schema-select_all_inferschema--Results] >> test.py::test[pg-tpcds-q53-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q54-default.txt-Results] |55.5%| [TS] {default-linux-x86_64, relwithdebinfo} 
ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] >> test.py::test[insert-append--ForceBlocks] [GOOD] >> test.py::test[insert-append--Results] |55.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |55.5%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber >> CommitOffset::Commit_WithoutSession_TopPast >> test.py::test[join-pullup_exclusion-off-ForceBlocks] [GOOD] >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |55.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |55.6%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |55.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> TopicAutoscaling::PartitionSplit_PQv1 >> test.py::test[expr-non_persistable_insert_into_fail--Results] [GOOD] >> Balancing::Balancing_OneTopic_TopicApi >> test.py::test[hor_join-filters--Results] |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots >> test.py::test[join-flatten_columns2--Results] [GOOD] >> test.py::test[join-grace_join1-grace-ForceBlocks] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_tuple--ForceBlocks] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> TopicTimestamp::TimestampRead_6MB_Topic_exact_WithRestart >> test.py::test[blocks-pg_to_dates--Results] [GOOD] >> test.py::test[blocks-sort_two_desc--Results] >> IncrementalBackup::MultiBackup [GOOD] >> IncrementalBackup::E2EBackupCollection >> test.py::test[aggregate-agg_phases_table3-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] >> test.py::test[aggr_factory-hll-default.txt-Results] [GOOD] >> test.py::test[aggr_factory-sum_if-default.txt-Results] |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |55.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |55.6%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb >> test.py::test[in-in_enum_single0-default.txt-ForceBlocks] [GOOD] >> test.py::test[in-in_enum_single0-default.txt-Results] >> test.py::test[pg-tpcds-q60-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-ForceBlocks] >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection >> test.py::test[blocks-combine_all_minmax_double--ForceBlocks] [GOOD] >> test.py::test[blocks-combine_all_minmax_double--Results] >> test.py::test[pg-tpcds-q03-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q03-default.txt-Results] >> test.py::test[window-yql-14738-default.txt-Results] [GOOD] >> 
test.py::test[ypath-limit_with_key-default.txt-ForceBlocks] >> test.py::test[order_by-assume_with_transform_desc--ForceBlocks] [GOOD] >> test.py::test[order_by-assume_with_transform_desc--Results] >> TopicDescribe::RetentionChangesStartOffset >> test.py::test[join-inner_with_order--Results] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--ForceBlocks] >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[insert-append--Results] [GOOD] >> test.py::test[join-alias_where_group--ForceBlocks] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> test.py::test[window-win_func_aggr_4func--ForceBlocks] >> test.py::test[produce-process_lambda_opt_args-default.txt-Results] [GOOD] |55.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[join-pullup_exclusion-off-Results] [SKIPPED] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] >> test.py::test[limit-insert_with_limit-dynamic-ForceBlocks] [GOOD] >> test.py::test[limit-insert_with_limit-dynamic-Results] >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> test.py::test[window-win_func_first_last--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |55.6%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental >> test.py::test[window-current/aggregations--Results] [GOOD] >> test.py::test[window-current/ansi_current_with_win--Results] >> test.py::test[window-win_func_aggr_4func_sort_desc--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> test.py::test[sampling-sort-default.txt-Results] [GOOD] >> test.py::test[optimizers-sort_constraint_in_left--Results] [GOOD] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] [GOOD] >> test.py::test[sampling-system_sampling-io_block_size-Results] [SKIPPED] >> test.py::test[sampling-topsort-default.txt-Results] >> test.py::test[blocks-date_top_sort--Results] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] >> test.py::test[pg-tpcds-q03-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-ForceBlocks] |55.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[schema-select_all_inferschema--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--ForceBlocks] >> test.py::test[blocks-combine_all_minmax_double--Results] [GOOD] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] >> test.py::test[in-in_enum_single0-default.txt-Results] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-ForceBlocks] |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |55.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> CommitOffset::PartitionSplit_OffsetCommit >> test.py::test[order_by-assume_with_transform_desc--Results] [GOOD] >> test.py::test[order_by-literal--ForceBlocks] >> test.py::test[pg-select_subquery2-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q16-default.txt-Results] >> test.py::test[join-inner_trivial--Results] [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> test.py::test[flatten_by-struct_without_correlation-default.txt-Results] [GOOD] >> test.py::test[hor_join-fuse_multi_outs2--Results] >> test.py::test[limit-insert_with_limit-dynamic-Results] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-ForceBlocks] >> test.py::test[join-left_null_literal-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner-off-Results] >> test.py::test[hor_join-fuse_multi_outs2--Results] [SKIPPED] >> test.py::test[hor_join-group_ranges--Results] |55.6%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[pg-tpcds-q67-default.txt-ForceBlocks] [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario >> test.py::test[join-lookupjoin_inner-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_inner_1o2o--Results] |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |55.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> test.py::test[pg-tpcds-q54-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q57-default.txt-Results] >> test.py::test[blocks-combine_hashed_sum--Results] [GOOD] >> test.py::test[blocks-complex_scalars--Results] >> test.py::test[aggregate-group_by_mul_gs_gs--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_duo--Results] >> test.py::test[hor_join-filters--Results] [GOOD] >> test.py::test[hor_join-out_sampling--Results] >> test.py::test[join-grace_join1-grace-ForceBlocks] [GOOD] >> test.py::test[join-grace_join1-grace-Results] >> test.py::test[window-win_func_aggr_4func_sort_desc--Results] [GOOD] >> test.py::test[join-grace_join1-grace-Results] [SKIPPED] >> test.py::test[join-inner_all_right--ForceBlocks] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--ForceBlocks] >> test.py::test[table_range-range_with_view--Results] [GOOD] >> test.py::test[table_range-table_funcs_expr--Results] >> test.py::test[aggregate-compare_tuple--ForceBlocks] [GOOD] >> test.py::test[aggregate-compare_tuple--Results] >> test.py::test[blocks-sort_two_desc--Results] [GOOD] >> test.py::test[case-case_val_when_then-default.txt-Results] >> test.py::test[ypath-limit_with_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-limit_with_key-default.txt-Results] >> test.py::test[produce-process_with_python_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate-default.txt-Results] >> IncrementalBackup::E2EBackupCollection [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental >> test.py::test[aggregate-aggregate_with_lambda_inside_avg--Results] [GOOD] >> 
test.py::test[aggregate-aggregation_and_order-default.txt-Results] >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> test.py::test[join-join_with_duplicate_keys_on_sorted--ForceBlocks] [GOOD] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] >> test.py::test[bigdate-table_int_cast-default.txt-ForceBlocks] [GOOD] >> test.py::test[bigdate-table_int_cast-default.txt-Results] >> test.py::test[join-alias_where_group--ForceBlocks] [GOOD] >> test.py::test[join-alias_where_group--Results] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> IncrementalBackup::ComplexBackupBackupCollection [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental >> test.py::test[blocks-lazy_nonstrict_nested--ForceBlocks] [GOOD] >> test.py::test[blocks-lazy_nonstrict_nested--Results] >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK >> CompDefrag::DoesItWork [GOOD] >> CompDefrag::DelayedCompaction >> test.py::test[order_by-literal--ForceBlocks] [GOOD] >> test.py::test[ypath-limit_with_key-default.txt-Results] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-ForceBlocks] >> test.py::test[order_by-literal--Results] >> test.py::test[pg-tpcds-q16-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q17-default.txt-Results] >> test.py::test[aggr_factory-sum_if-default.txt-Results] [GOOD] >> test.py::test[aggregate-agg_filter_pushdown--Results] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> test.py::test[window-win_func_aggr_4func--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> test.py::test[aggregate-compare_tuple--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-ForceBlocks] >> test.py::test[limit-many_top_sorts-default.txt-ForceBlocks] [GOOD] >> test.py::test[limit-many_top_sorts-default.txt-Results] >> test.py::test[optimizers-test_fuse_map_predicate_limit-default.txt-Results] [GOOD] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-11-03T14:02:58.666582Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.666593Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.666597Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.674717Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.687033Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.687135Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.687270Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-11-03T14:02:58.687414Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.690684Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.694688Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.694717Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-11-03T14:02:58.702936Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.702953Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.702957Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.703031Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.715464Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.721196Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.722704Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.727491Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.727547Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.727575Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.727585Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-11-03T14:02:58.727909Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.727914Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.727918Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.728018Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.728206Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.728641Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.729645Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.730139Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.730198Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.730222Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.730231Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-11-03T14:02:58.738962Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.738972Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.738976Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.739049Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.751918Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.752004Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.754721Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.755845Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.755927Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.755951Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.755959Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-11-03T14:02:58.756282Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.756287Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.756291Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.756367Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.756549Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.756592Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.760223Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.760360Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.760390Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.760410Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.760419Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-11-03T14:02:58.760632Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.760638Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.760643Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.760720Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-11-03T14:02:58.761411Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.761455Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.762692Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.762791Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.762831Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.766670Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.766691Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-11-03T14:02:58.770781Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.770789Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.770795Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.770868Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.771062Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.771107Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.774724Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.774951Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.778404Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.781327Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:02:58.781349Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-11-03T14:02:58.798939Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.798950Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.798954Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:02:58.812466Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:02:58.819714Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:02:58.830113Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.830302Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-11-03T14:02:58.830665Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:02:58.830763Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:02:58.830782Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-11-03T14:02:58.830791Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-11-03T14:02:58.865243Z :ReadSession INFO: Random seed for debugging is 1762178578865233 2025-11-03T14:02:59.207112Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499369080909149:2167];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:02:59.207142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002deb/r3tmp/tmpMvvZ04/pdisk_1.dat 2025-11-03T14:02:59.219457Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:02:59.220171Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:02:59.306892Z node 1 :KQP_PROXY ... : 1 2025-11-03T14:03:21.486753Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:21.587055Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:21.587078Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:21.587081Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:21.687425Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:21.687444Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:21.687449Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:21.787757Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:21.787776Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:21.787779Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:21.888107Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:21.888124Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:21.888126Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:21.988717Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:21.988737Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:21.988741Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: 
[72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.088957Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.088983Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.088986Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.189133Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.189149Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.189151Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.289538Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.289553Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.289557Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.389869Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.389898Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.389900Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.490231Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.490253Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.490255Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.590560Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.590573Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.590576Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.690909Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.690940Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.690943Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.791316Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 
2025-11-03T14:03:22.791341Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.791345Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.792801Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1277: session cookie 1 consumer shared/user session shared/user_1_1_6921453095314857440_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2025-11-03T14:03:22.891357Z :INFO: [/Root] [/Root] [e44892ef-bc599bfc-a0660c6f-a9f5fa75] Closing read session. Close timeout: 0.000000s 2025-11-03T14:03:22.891409Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-11-03T14:03:22.891429Z :INFO: [/Root] [/Root] [e44892ef-bc599bfc-a0660c6f-a9f5fa75] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16377 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:03:22.891451Z :NOTICE: [/Root] [/Root] [e44892ef-bc599bfc-a0660c6f-a9f5fa75] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-11-03T14:03:22.891461Z :DEBUG: [/Root] [/Root] [e44892ef-bc599bfc-a0660c6f-a9f5fa75] [dc1] Abort session to cluster 2025-11-03T14:03:22.891752Z :NOTICE: [/Root] [/Root] [e44892ef-bc599bfc-a0660c6f-a9f5fa75] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-11-03T14:03:22.891640Z node 2 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:22.891656Z node 2 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:22.891659Z node 2 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:22.892172Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_6921453095314857440_v1 grpc read done: success# 0, data# { } 2025-11-03T14:03:22.892192Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_6921453095314857440_v1 grpc read failed 2025-11-03T14:03:22.892201Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_6921453095314857440_v1 grpc closed 2025-11-03T14:03:22.892219Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_6921453095314857440_v1 is DEAD 2025-11-03T14:03:22.892428Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_6921453095314857440_v1 2025-11-03T14:03:22.892535Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037892] server disconnected, pipe [1:7568499399145682152:2473] destroyed 2025-11-03T14:03:22.892630Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_6921453095314857440_v1 2025-11-03T14:03:22.894046Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][rt3.dc1--test-topic] pipe [1:7568499399145682149:2470] disconnected. 2025-11-03T14:03:22.894058Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [1:7568499399145682149:2470] disconnected; active server actors: 1 2025-11-03T14:03:22.894061Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][rt3.dc1--test-topic] pipe [1:7568499399145682149:2470] client user disconnected session shared/user_1_1_6921453095314857440_v1 2025-11-03T14:03:23.468917Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:23.468928Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:23.468933Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-11-03T14:03:23.469048Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-11-03T14:03:23.469166Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-11-03T14:03:23.469222Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:23.469701Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-11-03T14:03:23.469965Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:03:23.470003Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-11-03T14:03:23.470380Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-11-03T14:03:23.470396Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-11-03T14:03:23.470408Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-11-03T14:03:23.470417Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-11-03T14:03:23.470457Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-11-03T14:03:23.470469Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> test.py::test[pg-tpcds-q64-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] >> test.py::test[blocks-lazy_nonstrict_nested--Results] [GOOD] >> test.py::test[blocks-member--ForceBlocks] >> test.py::test[join-join_with_duplicate_keys_on_sorted--Results] [GOOD] >> test.py::test[hor_join-group_ranges--Results] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--ForceBlocks] >> test.py::test[hor_join-yield_on-default.txt-Results] >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_AutoscaleAwareSDK >> test.py::test[join-inner_all_right--ForceBlocks] [GOOD] >> test.py::test[join-inner_all_right--Results] >> test.py::test[order_by-literal--Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--ForceBlocks] |55.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[sampling-topsort-default.txt-Results] [GOOD] >> test.py::test[join-lookupjoin_inner_1o2o--Results] [GOOD] >> test.py::test[join-lookupjoin_not_selected-off-Results] [SKIPPED] >> test.py::test[case-case_val_when_then-default.txt-Results] [GOOD] >> test.py::test[blocks-complex_scalars--Results] [GOOD] >> test.py::test[blocks-date_sub--Results] >> test.py::test[column_group-hint-perusage-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_1o2o-off-Results] [SKIPPED] >> test.py::test[join-lookupjoin_semi_subq-off-Results] [SKIPPED] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] >> test.py::test[join-alias_where_group--Results] [GOOD] >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] >> test.py::test[pg-tpcds-q57-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-Results] >> TopicAutoscaling::PartitionSplit_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK >> test.py::test[produce-process_with_udf_validate-default.txt-Results] [GOOD] >> test.py::test[produce-reduce_with_python_presort--Results] [SKIPPED] >> test.py::test[produce-reduce_with_python_row--Results] [SKIPPED] >> test.py::test[ql_filter-integer_optional_null--Results] >> Balancing::Balancing_OneTopic_TopicApi [GOOD] >> Balancing::Balancing_OneTopic_PQv1 >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> 
TopicAutoscaling::ControlPlane_DisableAutoPartitioning >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> test.py::test[window-win_func_aggr_4func--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--ForceBlocks] >> test.py::test[window-current/ansi_current_with_win--Results] [GOOD] >> test.py::test[window-current/session_incompat_sort--Results] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--ForceBlocks] [GOOD] >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental |55.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |55.7%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |55.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> TopicDescribe::RetentionChangesStartOffset [GOOD] >> TopicTimestamp::TimestampRead_1MB_Topic_exact_WithRestart |55.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[column_group-hint-perusage-Results] [SKIPPED] >> test.py::test[limit-many_top_sorts-default.txt-Results] [GOOD] >> test.py::test[limit-sort_calc_limit--ForceBlocks] >> test.py::test[bigdate-table_int_cast-default.txt-Results] [GOOD] |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> test.py::test[blocks-bitcast_block--ForceBlocks] >> test.py::test[blocks-date_equals_scalar--ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals_scalar--Results] >> test.py::test[ypath-limit_with_range-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-limit_with_range-default.txt-Results] >> test.py::test[table_range-table_funcs_expr--Results] [GOOD] >> test.py::test[tpch-q1-default.txt-Results] >> test.py::test[join-inner_all_right--Results] [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [GOOD] >> 
IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] >> test.py::test[pg-tpcds-q64-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-ForceBlocks] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> test.py::test[pg-tpcds-q17-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q38-default.txt-Results] |55.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |55.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |55.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> CommitOffset::Commit_WithoutSession_TopPast [GOOD] >> CommitOffset::Commit_WithWrongSession_ToParent >> test.py::test[pg-tpcds-q14-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] >> test.py::test[aggregate-aggregation_and_order-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_bsvolume_reboots/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] |55.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names--Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] >> test.py::test[blocks-member--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-member--Results] >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-11-03T14:03:09.418678Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1762178589418668 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025da/r3tmp/tmpKYAGt6/pdisk_1.dat 2025-11-03T14:03:09.884327Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:09.884505Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:03:09.884661Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499412505679404:2152];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:09.884783Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:03:09.887365Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:03:09.946725Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:09.946777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:09.960090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:09.960117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:09.967504Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:03:09.967911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:09.975089Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:09.976594Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7568499412505679287:2073] 1762178589871818 != 1762178589871821 2025-11-03T14:03:09.985495Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:09.983138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:09.983162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:09.992935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20999, node 1 2025-11-03T14:03:10.030672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025da/r3tmp/yandexu16pGP.tmp 2025-11-03T14:03:10.030687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025da/r3tmp/yandexu16pGP.tmp 2025-11-03T14:03:10.030762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025da/r3tmp/yandexu16pGP.tmp 2025-11-03T14:03:10.030811Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:03:10.035864Z INFO: TTestServer started on Port 24832 GrpcPort 20999 TClient is connected to server localhost:24832 PQClient connected to localhost:20999 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:10.087682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-11-03T14:03:10.246687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:10.496797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:10.494754Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499416800646957:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.494783Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499416800646946:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.494807Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.495472Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499416800646961:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.495480Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:10.508732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976720657, at schemeshard: 72057594046644480 2025-11-03T14:03:10.508896Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499416800646960:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-11-03T14:03:10.593323Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7568499416184680135:2330], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:03:10.593424Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=1&id=NTY5MTk5NjYtMTNlNDBkMWUtOWE2YWM0M2UtYTIwMmY3OWM=, ActorId: [1:7568499416184680084:2321], ActorState: ExecuteState, TraceId: 01k950essy9984zc9vyz3e7td9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:03:10.593971Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-11-03T14:03:10.597086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:10.616457Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499416800646990:2135] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:10.628853Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7568499416800646997:2305], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:03:10.629451Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=2&id=YjI0NjE1MTUtMjM0ODc1YzgtOWJkNjk5Y2ItOWU1ZjQzZWM=, ActorId: [2:7568499416800646944:2295], ActorState: ExecuteState, TraceId: 01k950esrx354sg6k35bf9w204, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:03:10.629604Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { ... p:1659: [72075186224037893][rt3.dc1--test-topic] pipe [5:7568499501467051435:2472] disconnected. 2025-11-03T14:03:30.308545Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][rt3.dc1--test-topic] pipe [5:7568499501467051435:2472] disconnected; active server actors: 1 2025-11-03T14:03:30.308556Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1695: [72075186224037893][rt3.dc1--test-topic] pipe [5:7568499501467051435:2472] disconnected no session 2025-11-03T14:03:30.327474Z node 5 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [5:7568499501467051399:2472] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-11-03T14:03:30.327490Z node 5 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [5:7568499501467051399:2472] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-11-03T14:03:30.327494Z node 5 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [5:7568499501467051399:2472] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-11-03T14:03:30.327514Z node 5 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-11-03T14:03:30.327695Z node 5 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 5, Generation: 1 2025-11-03T14:03:30.327712Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:2721: [PQ: 72075186224037892] server connected, pipe [5:7568499501467051455:2472], now have 1 active actors on pipe 2025-11-03T14:03:30.327721Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-11-03T14:03:30.327727Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:2635: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-11-03T14:03:30.327752Z node 5 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-11-03T14:03:30.327768Z node 5 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:30.327774Z node 5 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:30.327777Z node 5 :PERSQUEUE DEBUG: partition.cpp:2302: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-11-03T14:03:30.327789Z node 5 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:30.327805Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:35: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-11-03T14:03:30.327821Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-11-03T14:03:30.327870Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-11-03T14:03:30.327877Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:2635: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-11-03T14:03:30.327890Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-11-03T14:03:30.327916Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0 2025-11-03T14:03:30.328258Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1762178610328 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:03:30.328295Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-11-03T14:03:30.328437Z :INFO: [] MessageGroupId [src] SessionId [src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0] Write session: close. 
Timeout = 0 ms 2025-11-03T14:03:30.328450Z :INFO: [] MessageGroupId [src] SessionId [src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0] Write session will now close 2025-11-03T14:03:30.328458Z :DEBUG: [] MessageGroupId [src] SessionId [src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0] Write session: aborting 2025-11-03T14:03:30.328617Z :INFO: [] MessageGroupId [src] SessionId [src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0] Write session: gracefully shut down, all writes complete 2025-11-03T14:03:30.328623Z :DEBUG: [] MessageGroupId [src] SessionId [src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0] Write session: destroy 2025-11-03T14:03:30.328926Z node 5 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0 grpc read done: success: 0 data: 2025-11-03T14:03:30.328934Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0 grpc read failed 2025-11-03T14:03:30.328939Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0 grpc closed 2025-11-03T14:03:30.328943Z node 5 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|ecc3265e-87f73bad-7c655794-9a2e3ebf_0 is DEAD 2025-11-03T14:03:30.329158Z node 5 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-11-03T14:03:30.329228Z node 5 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037892] server disconnected, pipe [5:7568499501467051455:2472] destroyed 2025-11-03T14:03:30.329240Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-11-03T14:03:30.329245Z node 5 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:30.329248Z node 5 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:30.329250Z node 5 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:30.330755Z :INFO: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] Starting read session 2025-11-03T14:03:30.330765Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] Starting session to cluster null (localhost:16028) 2025-11-03T14:03:30.331000Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:30.331005Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:30.331008Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] Reconnecting session to cluster null in 0.000000s 2025-11-03T14:03:30.331046Z :ERROR: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-11-03T14:03:30.331051Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:30.331052Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:03:30.331063Z :INFO: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-11-03T14:03:30.331093Z :NOTICE: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-11-03T14:03:30.331096Z :DEBUG: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-11-03T14:03:30.331103Z :INFO: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] Closing read session. Close timeout: 0.000000s 2025-11-03T14:03:30.331107Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-11-03T14:03:30.331112Z :INFO: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] Counters: { Errors: 1 CurrentSessionLifetimeMs: 0 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:03:30.331117Z :NOTICE: [/Root] [/Root] [99daf313-f4f02694-cb9864d-61bce868] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-11-03T14:03:30.367864Z node 5 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:30.367876Z node 5 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:30.367878Z node 5 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:30.468257Z node 5 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:30.468274Z node 5 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:30.468277Z node 5 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:30.568632Z node 5 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:30.568649Z node 5 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:30.568651Z node 5 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:30.669013Z node 5 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:30.669034Z node 5 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:30.669038Z node 5 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist >> test.py::test[window-win_func_aggr_with_qualified_all_no_simple_columns--Results] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-10737_lost_passthrough-default.txt-Results] [GOOD] >> test.py::test[order_by-SortByOneField--Results] >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_num_key_and_subkey--Results] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> test.py::test[aggregate-group_by_rollup_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_ru_join--Results] >> test.py::test[hor_join-yield_on-default.txt-Results] [GOOD] >> test.py::test[in-in_ansi_join--Results] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] >> test.py::test[ql_filter-integer_optional_null--Results] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] |55.7%| [TS] {default-linux-x86_64, relwithdebinfo} ydb/services/dynamic_config/ut/unittest >> test.py::test[hor_join-out_sampling--Results] [GOOD] >> test.py::test[in-in_immediate_subquery-default.txt-Results] |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest |55.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[ypath-limit_with_range-default.txt-Results] [GOOD] >> 
IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental [GOOD] >> IncrementalBackup::E2EMultipleBackupRestoreCycles >> test.py::test[blocks-member--Results] [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs >> test.py::test[aggregate-agg_filter_pushdown--Results] [GOOD] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] >> test.py::test[insert-append_sorted-to_sorted-ForceBlocks] [GOOD] >> test.py::test[insert-append_sorted-to_sorted-Results] >> test.py::test[blocks-bitcast_block--ForceBlocks] [GOOD] >> test.py::test[blocks-bitcast_block--Results] >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup |55.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[pg-tpcds-q59-default.txt-Results] [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> test.py::test[aggregate-group_by_expr_columns_reuse-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--ForceBlocks] >> CommitOffset::PartitionSplit_OffsetCommit [GOOD] >> CommitOffset::DistributedTxCommit_LongReadSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:03:32.710481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:03:32.710500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:03:32.710504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:03:32.710510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:03:32.710523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:03:32.710526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:03:32.710532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:03:32.710542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-11-03T14:03:32.710619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:03:32.710695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:03:32.724269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:03:32.724288Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:32.727233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:03:32.727295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:03:32.727323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:03:32.728800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:03:32.728857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:03:32.728937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:32.729078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:03:32.729474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:32.729508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:03:32.729674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:03:32.729681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:32.729699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:03:32.729705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:03:32.729710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:03:32.729734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.730894Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:03:32.746694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:03:32.746755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.746796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:03:32.746802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:03:32.746829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:03:32.746838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:32.747847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:32.747882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:03:32.747922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.747929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:03:32.747933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:03:32.747937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:03:32.748312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.748321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:03:32.748324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:03:32.748579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.748584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.748589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:03:32.748593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:03:32.749038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:32.749355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:03:32.749410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:03:32.749554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:32.749572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:03:32.749577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:03:32.749616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:03:32.749620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:03:32.749641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:03:32.749648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:03:32.750013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:03:32.750019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:03:32.761201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:03:32.761226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:03:32.761235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-11-03T14:03:32.761710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:03:32.761753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-11-03T14:03:32.761789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.761795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:03:32.761803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-11-03T14:03:32.761824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:32.762204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-11-03T14:03:32.762232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-11-03T14:03:32.762303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:32.762322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:03:32.762329Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-11-03T14:03:32.762362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-11-03T14:03:32.762368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-11-03T14:03:32.762374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-11-03T14:03:32.762377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-11-03T14:03:32.762389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:03:32.762397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-11-03T14:03:32.762405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:03:32.762410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-11-03T14:03:32.762415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-11-03T14:03:32.762420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-11-03T14:03:32.762428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-11-03T14:03:32.762434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-11-03T14:03:32.762439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-11-03T14:03:32.762908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:03:32.762922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:03:32.762959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:32.762966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-11-03T14:03:32.763071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-11-03T14:03:32.763086Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-11-03T14:03:32.763091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-11-03T14:03:32.763096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-11-03T14:03:32.763102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:03:32.763116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-11-03T14:03:32.763578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-11-03T14:03:32.763636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-11-03T14:03:32.763643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-11-03T14:03:32.763713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-11-03T14:03:32.763733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-11-03T14:03:32.763738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:345:2335] TestWaitNotification: OK eventTxId 103 2025-11-03T14:03:32.763813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:03:32.763840Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 35us result status StatusSuccess 2025-11-03T14:03:32.763948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::Simple_PQv1 >> test.py::test[order_by-order_by_num_key_and_subkey--Results] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-ForceBlocks] |55.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl Test command err: FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=162179211.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=162179211.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=162179211.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=162179211.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=142179211.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=162179211.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=162179211.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=142178011.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=142179211.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=142179211.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=142178011.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=142178011.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=142178011.000000s;Name=;Codec=}; 2025-11-03T14:03:31.393682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:107;event=initialize_shard;step=OnActivateExecutor; 2025-11-03T14:03:31.397019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:126;event=initialize_shard;step=initialize_tiring_finished; 2025-11-03T14:03:31.397060Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-11-03T14:03:31.397594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:31.397629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:31.397656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:31.397670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:31.397688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:31.397701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:31.397715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:31.397728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:31.397741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:31.397754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:31.397767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:31.397782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:31.397795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:31.403711Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-11-03T14:03:31.403776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-11-03T14:03:31.403786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-11-03T14:03:31.403826Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:03:31.403876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:31.403891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:31.403898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-11-03T14:03:31.403911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-11-03T14:03:31.403922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:31.403931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:31.403936Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-11-03T14:03:31.403959Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:03:31.403969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:31.403976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:31.403981Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-11-03T14:03:31.403993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-11-03T14:03:31.404002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:31.404011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:31.404016Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-11-03T14:03:31.404026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:31.404035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:31.404040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-11-03T14:03:31.404051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:31.404059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:31.404065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-11-03T14:03:31.404091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:31.404103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:31.40410 ... 
ateInitShard: { InitShard { OwnerPathId: 1 Tables { PathId: 1 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" TypeId: 50 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Options { CompactionPlannerConstructor { ClassName: "l-buckets" LBuckets { } } } } } TtlSettings { Enabled { ColumnName: "timestamp" Tiers { ApplyAfterSeconds: 162179211 Delete { } } } Version: 1 } } OwnerPath: "/Root/olapStore" } } 2025-11-03T14:03:31.626500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Complete;fline=columnshard__init.cpp:158;step=TTxUpdateSchema.Complete; 2025-11-03T14:03:31.626572Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxInit.Execute at tablet 9437184 2025-11-03T14:03:31.626989Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=3; 2025-11-03T14:03:31.627055Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:special_valuesLoadingTime=53; 2025-11-03T14:03:31.627083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:special_valuesLoadingTime=20; 2025-11-03T14:03:31.627114Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tables_managerLoadingTime=25; 2025-11-03T14:03:31.627138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=4; 2025-11-03T14:03:31.627147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2025-11-03T14:03:31.627156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-11-03T14:03:31.627166Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2025-11-03T14:03:31.627201Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=81; 2025-11-03T14:03:31.627214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=6; 2025-11-03T14:03:31.627229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2025-11-03T14:03:31.627245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=11; 2025-11-03T14:03:31.627256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=5; 2025-11-03T14:03:31.627280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=18; 2025-11-03T14:03:31.627329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=41; 2025-11-03T14:03:31.627336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-11-03T14:03:31.627343Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-11-03T14:03:31.627351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-11-03T14:03:31.627370Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-11-03T14:03:31.627546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-11-03T14:03:31.627559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=7; 2025-11-03T14:03:31.627567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-11-03T14:03:31.627580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=7; 2025-11-03T14:03:31.627590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=3; 2025-11-03T14:03:31.627598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=2; 2025-11-03T14:03:31.627602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=604; 2025-11-03T14:03:31.627640Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-11-03T14:03:31.627650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-11-03T14:03:31.627665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard_impl.cpp:1531;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-11-03T14:03:31.627787Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:03:31.627794Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; 2025-11-03T14:03:31.627891Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:265;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-11-03T14:03:31.627908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:256;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-11-03T14:03:31.627914Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:03:31.627919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; VERIFY failed (2025-11-03T14:03:31.627940Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0xF50AFC6) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0xF501867) NKikimr::NTabletPipe::TClient::TClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+435 (0x1193ABE3) NKikimr::NTabletPipe::CreateClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+50 (0x1193AA02) NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&)+284 (0x1ACFD63C) NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&)+5290 (0x1ACF521A) NActors::IActor::Receive(TAutoPtr&)+216 (0xFC78008) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1389 (0x1D0151CD) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+4931 (0x1D012223) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+473 (0x1D0163B9) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+67 (0x1D026BA3) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+102 (0x1D026966) NActors::TEvents::TEvWakeup::TPtr 
NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+24 (0x1D018C08) NActors::TTestActorRuntimeBase::SimulateSleep(TDuration)+419 (0x1D018B13) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long)+216 (0x1EF4C728) ??+0 (0xF3C934B) ??+0 (0xF3C69C7) ??+0 (0xF3A7C0B) NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xF3B1BC7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0xF6AAEF8) NKikimr::NTestSuiteTColumnShardTestSchema::TCurrentTest::Execute()+433 (0xF3B14C1) NUnitTest::TTestFactory::Execute()+817 (0xF6AB641) NUnitTest::RunMain(int, char**)+3229 (0xF6B9DDD) ??+0 (0x7FF802CA7D90) __libc_start_main+128 (0x7FF802CA7E40) _start+41 (0xE526029) >> test.py::test[pg-tpcds-q38-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q14-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-ForceBlocks] >> test.py::test[pg-tpcds-q39-default.txt-Results] |55.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[blocks-member--Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-ForceBlocks] [GOOD] >> test.py::test[blocks-date_equals_scalar--Results] [GOOD] >> test.py::test[pg-tpcds-q99-default.txt-Results] >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> test.py::test[window-win_func_aggr_4func_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-distinct_opt_state_keys--Results] [GOOD] >> test.py::test[blocks-filter_partial_expr--Results] >> test.py::test[window-win_func_aggr_4func_sort--Results] >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] >> test.py::test[blocks-bitcast_block--Results] [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] >> BasicStatistics::ServerlessGlobalIndex >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_BackCompatibility >> IncrementalBackup::DropBackupCollectionSqlPathResolution [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent >> test.py::test[blocks-block_input_various_types--ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types--Results] [SKIPPED] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] >> test.py::test[blocks-block_input_various_types_2-v3-ForceBlocks] [SKIPPED] >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] >> IncrementalBackup::ResetOperationIncrementalBackup [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup >> DataShardSnapshots::MvccSnapshotAndSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2025-11-03T14:03:32.291158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:32.322949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:32.324535Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: 
[1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:32.324591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:32.324612Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003178/r3tmp/tmpQPQkMo/pdisk_1.dat 2025-11-03T14:03:32.379985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:32.380038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:32.394292Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:32.395414Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178611849834 != 1762178611849838 2025-11-03T14:03:32.426821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:32.476575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:32.510141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:32.594501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:32.789536Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-11-03T14:03:32.789585Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:742:2612], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-11-03T14:03:32.859384Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:742:2612], subTag: 2} TUpsertActor finished in 0.069715s, errors=0 2025-11-03T14:03:32.859428Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:743:2613] with tag# 2 2025-11-03T14:03:33.588724Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:33.591722Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:313:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:33.591787Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:33.591810Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003178/r3tmp/tmpl5GhlA/pdisk_1.dat 2025-11-03T14:03:33.650726Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:33.650764Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:33.653689Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:33.654046Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1762178613168596 != 1762178613168600 2025-11-03T14:03:33.685091Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:33.727379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:33.759014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:33.845382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:34.027132Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-11-03T14:03:34.027190Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:742:2612], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-11-03T14:03:34.095046Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:742:2612], subTag: 2} TUpsertActor finished in 0.067779s, errors=0 2025-11-03T14:03:34.095078Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:743:2613] with tag# 2 >> test.py::test[join-mapjoin_on_complex_type_optional_left_semi_single--Results] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--ForceBlocks] >> 
DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> test.py::test[join-anyjoin_merge_nodup--ForceBlocks] [GOOD] >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> DataShardSnapshots::VolatileSnapshotSplit >> test.py::test[join-anyjoin_merge_nodup--Results] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] |55.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[join-mapjoin_early_rewrite_sequence--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] Test command err: 2025-11-03T14:03:11.570470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:11.609884Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:11.612888Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:11.612980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:11.613024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001609/r3tmp/tmpSkQzBI/pdisk_1.dat 2025-11-03T14:03:11.687397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:11.687446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:11.700398Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:11.701518Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178591120184 != 1762178591120188 2025-11-03T14:03:11.733669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:11.784572Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:593:2520], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.784602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.784609Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:11.784632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122432, Sender [1:590:2518], Recipient [1:398:2397]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-11-03T14:03:11.784638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5241: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-11-03T14:03:11.807750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-11-03T14:03:11.807845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:11.807927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:03:11.807937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:03:11.807995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:03:11.808015Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:11.808038Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:11.808307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:03:11.808355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:03:11.808363Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:11.808370Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:11.808412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:11.808419Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:11.808576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:11.808588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:03:11.808595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:03:11.808602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:03:11.808620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:11.808725Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:11.808732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:11.808754Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:11.808761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:11.808768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:11.808773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 
2025-11-03T14:03:11.808779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:03:11.808789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:11.808842Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:11.808847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:11.808863Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:11.808868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:11.808874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:11.808879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:11.808885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-11-03T14:03:11.808889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:11.808896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:03:11.809509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:11.809636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:11.809646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:03:11.809692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:03:11.809985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877760, Sender [1:598:2525], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:600:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-11-03T14:03:11.809995Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5325: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-11-03T14:03:11.810002Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6097: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-11-03T14:03:11.810038Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269091328, Sender [1:394:2393], Recipient [1:398:2397]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-11-03T14:03:11.810122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:602:2528], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.810128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:11.810134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:11.810150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230 ... 7893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:33.918852Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:33.918855Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:33.939172Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:33.939197Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:33.939200Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:33.959589Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:33.959617Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:33.959622Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:33.980001Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:33.980027Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:33.980055Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:33.980061Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:33.980064Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:33.980084Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:33.980088Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:34.020802Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] 
Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.020832Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.020837Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.041230Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.041264Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.041272Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.062772Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.062804Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.062811Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.083242Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.083275Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.083281Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.103895Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.103921Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.103926Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.134957Z node 7 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:34.155920Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.155952Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.155958Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.186553Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.186586Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.186592Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.206922Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before 
ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.206945Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.206948Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.227271Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.227292Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.227295Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.247645Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.247672Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.247677Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.247709Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:34.247719Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:34.247738Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:34.247744Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:34.288378Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.288400Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.288404Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.308715Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.308737Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.308740Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.329075Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.329104Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.329109Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.349496Z node 
7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.349521Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.349524Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.369964Z node 7 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2025-11-03T14:03:34.370186Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:34.370197Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:34.370202Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:03:34.381986Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [7:1620:3208], Recipient [7:399:2398]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000007Z_incremental/Table" Options { ShowPrivateTable: true } 2025-11-03T14:03:34.382010Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found incremental backup table at: /Root/.backups/collections/TestCollection/19700101000007Z_incremental/Table 2025-11-03T14:03:34.382280Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [7:1622:3210], Recipient [7:399:2398]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000007Z_incremental/Table" Options { ShowPrivateTable: true } 2025-11-03T14:03:34.382285Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found attribute: __incremental_backup = {} >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> test.py::test[limit-sort_calc_limit--ForceBlocks] [GOOD] >> test.py::test[limit-sort_calc_limit--Results] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> DataShardSnapshots::MvccSnapshotTailCleanup >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> test.py::test[insert-append_sorted-to_sorted-Results] [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental >> test.py::test[pg-tpcds-q99-default.txt-Results] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-ForceBlocks] >> test.py::test[insert-override-from_sorted-ForceBlocks] |55.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[blocks-block_input_various_types_2-v3-Results] [SKIPPED] |55.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[order_by-SortByOneField--Results] [GOOD] >> test.py::test[sampling-bind_default-default.txt-Results] [GOOD] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] >> BasicStatistics::TwoServerlessTwoSharedDbs >> 
IncrementalBackup::ReplaceIntoIncrementalBackup [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStatsServerless >> test.py::test[window-win_multiaggr_list-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_list-default.txt-Results] >> test.py::test[window-current/session_incompat_sort--Results] [GOOD] >> test.py::test[window-win_func_aggr_4func_sort--Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--ForceBlocks] >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> test.py::test[window-full/aggregations--Results] >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK >> test.py::test[join-extract_or_predicates-default.txt-Results] [GOOD] >> test.py::test[join-grace_join1--Results] [SKIPPED] >> test.py::test[join-inner_all_right--Results] >> test.py::test[limit-sort_calc_limit--Results] [GOOD] >> test.py::test[lineage-list_literal4-default.txt-ForceBlocks] [SKIPPED] >> test.py::test[lineage-list_literal4-default.txt-Results] [SKIPPED] >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--ForceBlocks] >> test.py::test[tpch-q1-default.txt-Results] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> test.py::test[order_by-order_by_tuple_expr-default.txt-ForceBlocks] [GOOD] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] >> BasicStatistics::ServerlessTimeIntervals >> test.py::test[pg-tpcds-q39-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q51-default.txt-Results] |55.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |55.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |55.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> Balancing::Balancing_OneTopic_PQv1 [GOOD] >> Balancing::Balancing_ManyTopics_TopicApi >> test.py::test[pg-tpcds-q25-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q25-default.txt-Results] >> test.py::test[blocks-filter_partial_expr--Results] [GOOD] >> test.py::test[blocks-pg--Results] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] >> IncrementalBackup::ResetVsUpsertColumnStateSerialization >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> BasicStatistics::Serverless >> test.py::test[in-in_immediate_subquery-default.txt-Results] [GOOD] >> test.py::test[in-in_sorted_by_tuple--Results] >> test.py::test[window-win_multiaggr_list-default.txt-Results] [GOOD] >> 
test.py::test[ypath-direct_read_from_dynamic--ForceBlocks] >> test.py::test[order_by-order_by_tuple_expr-default.txt-Results] [GOOD] >> test.py::test[pg-aggregate_minus_zero--ForceBlocks] >> test.py::test[blocks-date_sub--Results] [GOOD] >> test.py::test[blocks-div_uint64--Results] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> test.py::test[aggregate-group_by_ru_join--Results] [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> test.py::test[aggregate-group_compact_sorted--Results] >> test.py::test[aggregate-agg_phases_table2-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> test.py::test[aggregate-aggrs_no_grouping_via_map_compact-default.txt-Results] [GOOD] >> test.py::test[aggregate-group_by_cube_duo--Results] >> test.py::test[aggregate-group_by_gs_with_rollup--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_gs_with_rollup--Results] >> TStreamingQueryTestReboots::CreateDroppedStreamingQuery >> test.py::test[join-anyjoin_merge_nodup--Results] [GOOD] >> test.py::test[join-bush_in_in_in--ForceBlocks] >> test.py::test[insert-override-from_sorted-ForceBlocks] [GOOD] >> test.py::test[insert-override-from_sorted-Results] >> test.py::test[pg-tpcds-q25-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-ForceBlocks] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict >> test.py::test[join-mapjoin_opt_vs_2xopt--ForceBlocks] [GOOD] >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] >> test.py::test[sampling-bind_multiple_sample-default.txt-Results] [GOOD] >> test.py::test[window-win_func_over_group_by_list_names_order_prefix--Results] [GOOD] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] >> test.py::test[schema-copy-read_schema-Results] >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> IncrementalBackup::ResetVsUpsertColumnStateSerialization [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore >> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD] >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning >> test.py::test[pg-tpch-q13-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpch-q13-default.txt-Results] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> 
test.py::test[insert-override-from_sorted-Results] [GOOD] >> test.py::test[insert-override-proto-ForceBlocks] >> test.py::test[pg-tpcds-q51-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q65-default.txt-Results] >> test.py::test[join-inner_all_right--Results] [GOOD] >> test.py::test[join-inner_with_select--Results] >> test.py::test[blocks-pg--Results] [GOOD] >> test.py::test[blocks-struct_type--Results] >> test.py::test[window-win_func_spec_with_part--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--ForceBlocks] [GOOD] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] >> test.py::test[window-win_func_spec_with_part--Results] >> test.py::test[blocks-div_uint64--Results] [GOOD] >> test.py::test[blocks-exists--Results] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> test.py::test[blocks-date_greater_or_equal--ForceBlocks] [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> test.py::test[join-mapjoin_opt_vs_2xopt--Results] [GOOD] >> test.py::test[join-mergejoin_big_primary_unique--ForceBlocks] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> test.py::test[blocks-date_greater_or_equal--Results] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> test.py::test[ypath-direct_read_from_dynamic--ForceBlocks] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> test.py::test[pg-aggregate_minus_zero--ForceBlocks] [GOOD] >> test.py::test[pg-aggregate_minus_zero--Results] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> test.py::test[aggregate-group_by_gs_with_rollup--Results] [GOOD] >> test.py::test[aggregate-group_by_hop--ForceBlocks] >> CommitOffset::Commit_WithWrongSession_ToParent [GOOD] >> CommitOffset::Commit_WithoutSession_ParentNotFinished >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> test.py::test[schema-copy-read_schema-Results] [GOOD] >> test.py::test[schema-insert_sorted-row_spec-Results] >> test.py::test[aggregate-group_compact_sorted--Results] [GOOD] >> test.py::test[window-win_func_spec_with_part--Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] >> test.py::test[window-win_multiaggr_tuple-default.txt-ForceBlocks] >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[join-bush_in_in_in--ForceBlocks] 
[GOOD] >> test.py::test[join-bush_in_in_in--Results] >> IncrementalBackup::E2EMultipleBackupRestoreCycles [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames |55.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |55.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |55.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_by_column_lookup_in_const_dict-default.txt-Results] [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStatsServerless Test command err: 2025-11-03T14:03:36.980821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:37.017815Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:37.017883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:37.017912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0020f0/r3tmp/tmpwoUV3e/pdisk_1.dat 2025-11-03T14:03:37.122183Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:37.156025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:37.156070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:37.177950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6322, node 1 2025-11-03T14:03:37.239981Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:37.240007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:37.240012Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:37.240190Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:03:37.243080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:37.303086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6854 2025-11-03T14:03:37.614483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:38.396461Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:38.397976Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-11-03T14:03:38.398659Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:38.406846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:38.406879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:38.440305Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:03:38.440905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:38.514719Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:38.514752Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:38.515142Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515317Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515461Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515666Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515732Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515776Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515816Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515886Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.515917Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.527896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:38.583209Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:38.592155Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-11-03T14:03:38.592187Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-11-03T14:03:38.599362Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-11-03T14:03:38.599551Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-11-03T14:03:38.599574Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-11-03T14:03:38.599579Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-11-03T14:03:38.599584Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-11-03T14:03:38.599588Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-11-03T14:03:38.599593Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-11-03T14:03:38.599598Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-11-03T14:03:38.599723Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-11-03T14:03:38.602155Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8310: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:38.602180Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8340: ConnectToSA(), pipe client id: [2:1827:2584], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:38.604439Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1850:2598] 2025-11-03T14:03:38.604510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1850:2598], schemeshard id = 72075186224037897 2025-11-03T14:03:38.617390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1879:2606] 2025-11-03T14:03:38.618088Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-11-03T14:03:38.621217Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-11-03T14:03:38.621231Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-11-03T14:03:38.621240Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-11-03T14:03:38.622705Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:1963:2646], domain# [OwnerId: 72057594046644480, LocalPathId: 2], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-11-03T14:03:38.623404Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:38.624699Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-11-03T14:03:38.624722Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-11-03T14:03:38.710432Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:38.763508Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-11-03T14:03:38.775738Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-11-03T14:03:38.854663Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 
2025-11-03T14:03:38.959635Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-11-03T14:03:39.690311Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:39.714342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:40.235771Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:40.249731Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 7207518622403 ... =72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:41.057245Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:41.057250Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:41.057255Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:41.057259Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:41.057274Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:41.057278Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:41.057287Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:41.057292Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:41.057297Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:41.057301Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:41.057306Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:41.057310Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:41.057319Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:41.057323Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:41.057331Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:41.057335Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:41.057700Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:41.057711Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:41.057720Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:41.057725Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:41.057739Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:41.057744Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:41.057752Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:41.057757Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:41.057763Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:41.057767Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:41.057772Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:41.057777Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:41.057793Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:41.057799Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:41.057808Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:41.057813Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:41.057818Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:41.057823Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:41.057829Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:41.057834Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:41.057844Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:41.057848Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:41.057856Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:41.057860Z node 2 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:41.071811Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0x14A3B1B6) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0x14A31A57) NKikimr::NTabletPipe::TClient::TClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+435 (0x16C26EA3) NKikimr::NTabletPipe::CreateClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+50 (0x16C26CC2) NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&)+284 (0x2087EFAC) NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&)+5290 (0x20876C8A) NActors::IActor::Receive(TAutoPtr&)+216 (0x151A21E8) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1389 (0x2255567D) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+4931 (0x225526D3) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&, TDuration)+67 (0x22551363) Ydb::Scripting::ExecuteYqlResponse NActors::TTestActorRuntime::WaitFuture(NThreading::TFuture, TDuration)+310 (0x28173A56) NKikimr::NStat::ExecuteYqlScript(NKikimr::NStat::TTestEnv&, TBasicString> const&, bool)+384 (0x28172CD0) NKikimr::NStat::CreateColumnStoreTable(NKikimr::NStat::TTestEnv&, TBasicString> const&, TBasicString> const&, int)+212 (0x281742B4) NKikimr::NStat::ProbeBaseStatsTest(bool)+1270 (0x149117A6) NKikimr::NStat::NTestSuiteHttpRequest::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0x14915D17) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0x14BDA438) NKikimr::NStat::NTestSuiteHttpRequest::TCurrentTest::Execute()+429 (0x149156BD) NUnitTest::TTestFactory::Execute()+817 (0x14BDAB81) NUnitTest::RunMain(int, char**)+3229 (0x14BE931D) ??+0 (0x7FF30A462D90) __libc_start_main+128 (0x7FF30A462E40) _start+41 (0x13A6B029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] Test command err: 2025-11-03T14:03:13.864514Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:13.902671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:13.905223Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:13.905308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:13.905347Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0015d8/r3tmp/tmpahpr5X/pdisk_1.dat 2025-11-03T14:03:13.983106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:13.983150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:13.994889Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:13.995851Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178593420496 != 1762178593420500 2025-11-03T14:03:14.035125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:14.092156Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:593:2520], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.092186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.092193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:14.092219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122432, Sender [1:590:2518], Recipient [1:398:2397]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-11-03T14:03:14.092225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5241: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-11-03T14:03:14.111979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-11-03T14:03:14.112069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.112142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:03:14.112152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:03:14.112203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:03:14.112227Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:14.112247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.112496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:03:14.112548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:03:14.112555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.112561Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:14.112602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.112610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.112668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.112679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:03:14.112688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:03:14.112693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:03:14.112710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.112799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.112806Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:14.112826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.112831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.112837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.112842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 
2025-11-03T14:03:14.112847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:03:14.112856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.112901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.112906Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:14.112920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.112924Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.112930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.112935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.112940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-11-03T14:03:14.112943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.112950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:03:14.113677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:14.113800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.113814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:03:14.113864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:03:14.114155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877760, Sender [1:598:2525], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:600:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-11-03T14:03:14.114165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5325: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-11-03T14:03:14.114171Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6097: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-11-03T14:03:14.114204Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269091328, Sender [1:394:2393], Recipient [1:398:2397]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-11-03T14:03:14.114287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:602:2528], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.114293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.114297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:14.114312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230 ... pl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:03:41.186535Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046644480 2025-11-03T14:03:41.186541Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [8:1132:2873], Recipient [8:398:2397]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:03:41.186543Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:03:41.186546Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046644480 2025-11-03T14:03:41.301668Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.301690Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.301727Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.301732Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.486663Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.486687Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.486707Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.486711Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.673887Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.673918Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.673941Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.673947Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.817238Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.817264Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.817283Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.817288Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.961299Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.961327Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:41.961346Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:41.961352Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.104967Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.104998Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.105018Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.105023Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.238140Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.238175Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.238198Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 
2025-11-03T14:03:42.238204Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.392221Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.392245Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.392259Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.392263Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.463854Z node 8 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037888, for tableId 6, but no stats yet 2025-11-03T14:03:42.556232Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.556259Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.556280Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.556285Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.638700Z node 8 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037889, for tableId 12, but no stats yet 2025-11-03T14:03:42.699927Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.699961Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.699983Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [8:398:2397], Recipient [8:398:2397]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.699988Z node 8 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.710266Z node 8 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037890, for tableId 13, but no stats yet 2025-11-03T14:03:42.806343Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [8:1221:2952], serverId# [8:1222:2953], sessionId# [0:0:0] 2025-11-03T14:03:42.806406Z node 8 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710666. 
Ctx: { TraceId: 01k950fs9yfkvqafdfjfsgt8s4, Database: , SessionId: ydb://session/3?node_id=8&id=OThkYmM0ODktYjIxMjU5M2QtZTFjODM2OC01NTM5MWE5OA==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-11-03T14:03:42.811674Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:1232:2960], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/__ydb_backup_meta/MetaTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:03:42.812357Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=8&id=NjllMDAxMDgtNzNhNjdkYzItZTgyMTJhYWMtYzk4YjFlYWE=, ActorId: [8:1229:2957], ActorState: ExecuteState, TraceId: 01k950fsar3dr5qesrjn5xy331, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:03:42.819965Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [8:1247:2969], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/MetaTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:03:42.821181Z node 8 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=8&id=NTRiMzAzNjAtMTZhNDYwZjktODA1MGU1MjktNWY0N2JjY2M=, ActorId: [8:1244:2966], ActorState: ExecuteState, TraceId: 01k950fsaxb12x1wnsm69xbt5d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:03:42.845058Z node 8 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710667. Ctx: { TraceId: 01k950fsb687bqt83p9hf627d2, Database: , SessionId: ydb://session/3?node_id=8&id=NTRkZmM2NWYtZjg2OGY3NDMtZDM4YmY3MDMtMmFiNDM4NTM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 100 } items { uint32_value: 1000 } } >> test.py::test[pg-aggregate_minus_zero--Results] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-ForceBlocks] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part11/pytest >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] Test command err: 127.0.0.1 - - [03/Nov/2025 14:02:25] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [03/Nov/2025 14:02:26] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [03/Nov/2025 14:02:28] "GET /foo.txt HTTP/1.1" 200 - |55.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part16/pytest >> test.py::test[optimizers-yql-8041-fuse_with_desc_map--Results] [GOOD] >> test.py::test[pg-tpcds-q50-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-ForceBlocks] >> test.py::test[insert-override-proto-ForceBlocks] [GOOD] >> test.py::test[insert-override-proto-Results] >> test.py::test[pg-tpch-q13-default.txt-Results] [GOOD] >> test.py::test[produce-process_streaming_count-default.txt-ForceBlocks] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink |55.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part0/pytest >> test.py::test[pg-tpcds-q65-default.txt-Results] [GOOD] >> test.py::test[tpch-q13-default.txt-Results] [GOOD] >> test.py::test[tpch-q20-default.txt-Results] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable >> TopicTimestamp::TimestampRead_6MB_Topic_exact_WithRestart [GOOD] >> TopicTimestamp::TimestampRead_6MB_Topic_offset+middle_WithRestart >> test.py::test[join-inner_with_select--Results] [GOOD] >> test.py::test[join-inner_with_select-off-Results] [SKIPPED] >> test.py::test[join-join_cbo_3_tables--Results] >> test.py::test[in-in_sorted_by_tuple--Results] [GOOD] >> test.py::test[in-in_types_cast-default.txt-Results] >> 
DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] Test command err: 2025-11-03T14:03:14.642680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:14.677762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:14.680359Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:14.680435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:14.680473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00158b/r3tmp/tmpj8eyol/pdisk_1.dat 2025-11-03T14:03:14.759561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:14.759605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:14.780804Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:14.781985Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178594186196 != 1762178594186200 2025-11-03T14:03:14.816583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:14.872970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:593:2520], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.872997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.873005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:14.873029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122432, Sender [1:590:2518], Recipient [1:398:2397]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-11-03T14:03:14.873034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5241: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-11-03T14:03:14.893094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-11-03T14:03:14.893191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.893268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:03:14.893277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:03:14.893327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:03:14.893343Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:14.893362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.893588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:03:14.893637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:03:14.893645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.893650Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:14.893687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.893693Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.893707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.893716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:03:14.893724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:03:14.893730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:03:14.893743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.893815Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.893819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:14.893836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.893840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.893846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.893852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 
2025-11-03T14:03:14.893856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:03:14.893865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.893919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.893926Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:14.893943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.893947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:14.893953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.893958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:14.893963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-11-03T14:03:14.893968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:14.893974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:03:14.894673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:14.894816Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:14.894830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:03:14.894873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:03:14.895167Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877760, Sender [1:598:2525], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:600:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-11-03T14:03:14.895178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5325: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-11-03T14:03:14.895184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6097: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-11-03T14:03:14.895216Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269091328, Sender [1:394:2393], Recipient [1:398:2397]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-11-03T14:03:14.895301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:602:2528], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.895308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:14.895312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:14.895327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230 ... StateWork, processing event TEvPrivate::TEvProgressIncrementalRestore 2025-11-03T14:03:42.529357Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 281474976715666 tablet: 72057594046644480 2025-11-03T14:03:42.529361Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 281474976715666 tablet: 72057594046644480 2025-11-03T14:03:42.529366Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=2, IncrementalBackups.size()=2 2025-11-03T14:03:42.529368Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-11-03T14:03:42.529371Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:187: [IncrementalRestore] No more incremental backups to process 2025-11-03T14:03:42.529463Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715666 2025-11-03T14:03:42.529480Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715666 2025-11-03T14:03:42.682998Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.683033Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.683057Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.683062Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.826840Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.826872Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.826890Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.826896Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.984155Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.984191Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:42.984212Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:42.984217Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.118033Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.118068Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.118090Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.118096Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.273354Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.273401Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.273424Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.273430Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.345313Z node 7 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037888, for tableId 6, but no stats yet 2025-11-03T14:03:43.437953Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.437982Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.438000Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: 
StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.438004Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.520489Z node 7 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037889, for tableId 12, but no stats yet 2025-11-03T14:03:43.581646Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.581689Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.581710Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.581714Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.591994Z node 7 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037890, for tableId 14, but no stats yet 2025-11-03T14:03:43.663940Z node 7 :TX_DATASHARD DEBUG: datashard_impl.h:3375: SendPeriodicTableStats at datashard 72075186224037891, for tableId 15, but no stats yet 2025-11-03T14:03:43.739614Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.739651Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5238: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-11-03T14:03:43.739674Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124999, Sender [7:399:2398], Recipient [7:399:2398]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.739681Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5237: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-11-03T14:03:43.768645Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k950ft8667gnxkky9p3t5mbp, Database: , SessionId: ydb://session/3?node_id=7&id=ZmQ5ZGJjM2QtODRlZDE0ZTYtMzgxZmRmZWUtODkxZTQ5MzU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 300 } }, { items { uint32_value: 5 } items { uint32_value: 50 } }, { items { uint32_value: 6 } items { uint32_value: 600 } } 2025-11-03T14:03:43.791031Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715668. Ctx: { TraceId: 01k950ft8t2d4yr42x4f1yjvhh, Database: , SessionId: ydb://session/3?node_id=7&id=MzRkYzg1ZmUtNzQ1ZThhMmQtNjFiNDViMjAtNzcxN2MzMDM=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root { items { uint32_value: 1 } items { null_flag_value: NULL_VALUE } items { uint32_value: 2 } }, { items { uint32_value: 2 } items { uint32_value: 200 } items { uint32_value: 2 } }, { items { uint32_value: 6 } items { uint32_value: 600 } items { uint32_value: 2 } } 2025-11-03T14:03:43.816383Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715669. Ctx: { TraceId: 01k950ft9h71dcxm92024nfr8m, Database: , SessionId: ydb://session/3?node_id=7&id=ZGIzY2JmMDQtMzMzNWZmNTAtZDczODQzNjQtY2NkYzQzYWU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 300 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { null_flag_value: NULL_VALUE } items { uint32_value: 2 } } 2025-11-03T14:03:43.839866Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715670. Ctx: { TraceId: 01k950ftaa96ksjwf5q89ha2d9, Database: , SessionId: ydb://session/3?node_id=7&id=NGRlYTgyOGMtZTU4MWQ3ZWItYTQ5ZjViNjYtMjA5OWEwNmU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 2 } }, { items { uint32_value: 6 } } 2025-11-03T14:03:43.900846Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715671. Ctx: { TraceId: 01k950ftb1dvg11vxy724w70tg, Database: , SessionId: ydb://session/3?node_id=7&id=YWZiZDQyNC05NTkyM2UtNmZhMWExNDAtMzAxOTBkMWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:43.901700Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715672. Ctx: { TraceId: 01k950ftb1dvg11vxy724w70tg, Database: , SessionId: ydb://session/3?node_id=7&id=YWZiZDQyNC05NTkyM2UtNmZhMWExNDAtMzAxOTBkMWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 6 } items { uint32_value: 600 } } 2025-11-03T14:03:43.921751Z node 7 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715673. Ctx: { TraceId: 01k950ftd0ax4k2xaz9hx3q0qx, Database: , SessionId: ydb://session/3?node_id=7&id=ZTM1ZTg2Ni03MjEyZDlmMy0yZjA1NzM1ZC1mMTg1M2FhMQ==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } >> test.py::test[join-mergejoin_big_primary_unique--ForceBlocks] [GOOD] |55.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> test.py::test[join-mergejoin_big_primary_unique--Results] >> test.py::test[window-win_func_rank_with_order_by_aggr_key--Results] [GOOD] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> test.py::test[blocks-exists--Results] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--Results] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> test.py::test[insert-override-proto-Results] [GOOD] >> test.py::test[insert-use_anon_table_without_fill_fail--ForceBlocks] >> test.py::test[blocks-struct_type--Results] [GOOD] |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest |55.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> test.py::test[window-full/aggregations--Results] [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> test.py::test[blocks-date_greater_or_equal--Results] [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> test.py::test[window-full/aggregations_leadlag_compact--Results] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] >> test.py::test[aggregate-group_by_hop--ForceBlocks] [GOOD] >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees >> test.py::test[join-bush_in_in_in--Results] [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit >> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD] >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> test.py::test[join-bush_in_in_in-off-ForceBlocks] >> KqpJoinOrder::Sortings4Year+RemoveLimitOperator |55.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[blocks-struct_type--Results] [GOOD] >> KqpJoinOrder::SortingsSimpleOrderByAliasIndexDesc+RemoveLimitOperator >> test.py::test[schema-insert_sorted-row_spec-Results] [GOOD] >> test.py::test[schema-other--Results] [SKIPPED] >> test.py::test[schema-select_field-row_spec-Results] ------- [TM] {default-linux-x86_64, 
relwithdebinfo} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] Test command err: 2025-11-03T14:03:13.235130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:13.274778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:13.277440Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:13.277530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:13.277572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0015fe/r3tmp/tmpX3a6yA/pdisk_1.dat 2025-11-03T14:03:13.359493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:13.359537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:13.372239Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:13.373313Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178592752742 != 1762178592752746 2025-11-03T14:03:13.407197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:13.460681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:593:2520], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:13.460712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:13.460719Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:13.460747Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122432, Sender [1:590:2518], Recipient [1:398:2397]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-11-03T14:03:13.460753Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5241: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-11-03T14:03:13.493981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-11-03T14:03:13.494081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:13.494175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:03:13.494186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:03:13.494247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:03:13.494263Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:13.494284Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:13.494552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:03:13.494603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:03:13.494612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:13.494619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:13.494683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:13.494692Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:13.494753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:13.494764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:03:13.494772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:03:13.494778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:03:13.494794Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:13.494897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:13.494904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:13.494924Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:13.494928Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:13.494934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:13.494941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 
2025-11-03T14:03:13.494946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:03:13.494956Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:13.495001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:13.495006Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2025-11-03T14:03:13.495025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [1:398:2397], Recipient [1:398:2397]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:03:13.495029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:03:13.495035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:13.495040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-11-03T14:03:13.495045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-11-03T14:03:13.495049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-11-03T14:03:13.495056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:03:13.496169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:13.496324Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:13.496339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:03:13.496386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:03:13.496704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877760, Sender [1:598:2525], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:600:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-11-03T14:03:13.496714Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5325: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-11-03T14:03:13.496722Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6097: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-11-03T14:03:13.496756Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269091328, Sender [1:394:2393], Recipient [1:398:2397]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-11-03T14:03:13.496841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [1:602:2528], Recipient [1:398:2397]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:13.496847Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:13.496853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046644480 2025-11-03T14:03:13.496867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230 ... e_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:44.649814Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-11-03T14:03:44.649830Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-11-03T14:03:44.649835Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:44.649853Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2025-11-03T14:03:44.649858Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-11-03T14:03:44.649871Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [6:824:2660] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 at schemeshard: 72057594046644480 2025-11-03T14:03:44.649999Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [6:841:2670], Recipient [6:398:2397]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:03:44.650006Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:03:44.650011Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046644480 2025-11-03T14:03:44.653739Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:855:2683], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:44.653782Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:44.653920Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:860:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:44.653933Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:44.654084Z node 6 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [6:859:2686] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-11-03T14:03:45.267155Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:45.272909Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:45.273575Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:45.273592Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:03:45.273631Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0015fe/r3tmp/tmp9GrNPu/pdisk_1.dat 2025-11-03T14:03:45.327831Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:45.327869Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:45.332567Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:45.333113Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:34:2081] 1762178624854729 != 1762178624854733 2025-11-03T14:03:45.364139Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:45.407543Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:45.454015Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:45.530889Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:652:2547], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.530940Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.531026Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:678:2554], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.531036Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.531550Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateBackupCollection, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp:1251) 2025-11-03T14:03:45.639246Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:708:2582], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.639285Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.639400Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:713:2586], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.639414Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.639998Z node 7 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [7:712:2585] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/NonExistentTable\', error: path hasn\'t been resolved, nearest resolved path: \'/Root\' (id: [OwnerId: 72057594046644480, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp:190" issue_code: 200200 severity: 1 } 2025-11-03T14:03:45.640881Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:729:2594], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.640897Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.640938Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:733:2598], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.640944Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.640952Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:736:2601], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:45.641575Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:45.663337Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:45.757668Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:738:2603], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-11-03T14:03:45.779056Z node 7 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [7:807:2641] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> test.py::test[insert-use_anon_table_without_fill_fail--ForceBlocks] [GOOD] >> KqpFlipJoin::Inner_1 >> KqpJoinOrder::SortingsDifferentDirs+RemoveLimitOperator >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> TTxDataShardUploadRows::TestUploadRows >> test.py::test[insert-use_anon_table_without_fill_fail--Results] [GOOD] >> test.py::test[insert-yql-14538--ForceBlocks] |55.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part4/pytest >> test.py::test[aggregate-group_by_hop--Results] [SKIPPED] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink >> test.py::test[pg-select_columnref2-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] >> test.py::test[join-mergejoin_big_primary_unique--Results] [GOOD] >> test.py::test[join-mergejoin_with_table_range--ForceBlocks] >> test.py::test[window-win_multiaggr_tuple-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TStreamingQueryTestReboots::CreateDroppedStreamingQuery [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query_reboots/unittest >> TStreamingQueryTestReboots::CreateDroppedStreamingQuery [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:115:2146] 2025-11-03T14:03:39.067013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 
600.000000s, MaxRate# 1 2025-11-03T14:03:39.067040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:03:39.067046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:03:39.067056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:03:39.067063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:03:39.067068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:03:39.067078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:03:39.067092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:03:39.067240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:03:39.067317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:03:39.091497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-11-03T14:03:39.091541Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:39.091647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:180:2058] recipient: [1:15:2062] 2025-11-03T14:03:39.094797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:03:39.095923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:03:39.095983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:03:39.098097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:03:39.098153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:03:39.098268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:39.098346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:03:39.099123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:39.099166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:03:39.099460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:03:39.099479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:39.099505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:03:39.099515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:03:39.099522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:03:39.099544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-11-03T14:03:39.101360Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-11-03T14:03:39.118866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:03:39.118960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:39.119039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:03:39.119048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:03:39.119120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:03:39.119133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:39.120061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-11-03T14:03:39.120111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:03:39.120182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:39.120192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:03:39.120196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:03:39.120203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:03:39.120688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:39.120709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:03:39.120717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:03:39.121108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:39.121118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:03:39.121123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:03:39.121129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:03:39.121617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:03:39.122026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:03:39.122061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:03:39.122254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:39.122274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: 
Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tabl ... ard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.444760Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1005 2025-11-03T14:03:47.444763Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-11-03T14:03:47.444767Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-11-03T14:03:47.444774Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1005, ready parts: 0/1, is published: true 2025-11-03T14:03:47.445266Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1005:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1005 msg type: 269090816 2025-11-03T14:03:47.445293Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1005, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1005 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1005 at step: 5000006 2025-11-03T14:03:47.445391Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:03:47.445407Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1005 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 128849021040 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:03:47.445413Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 1005:0, HandleReply TEvOperationPlan: step# 5000006 2025-11-03T14:03:47.445426Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-11-03T14:03:47.445441Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1005:0 128 -> 240 2025-11-03T14:03:47.445463Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:03:47.445470Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-11-03T14:03:47.445670Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.445732Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 FAKE_COORDINATOR: Erasing txId 1005 2025-11-03T14:03:47.445977Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:03:47.445982Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:03:47.446006Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1005, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-11-03T14:03:47.446022Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:03:47.446025Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [30:210:2211], at schemeshard: 72057594046678944, txId: 1005, path id: 1 2025-11-03T14:03:47.446028Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [30:210:2211], at schemeshard: 72057594046678944, txId: 1005, path id: 4 2025-11-03T14:03:47.446054Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1005:0, at schemeshard: 72057594046678944 2025-11-03T14:03:47.446059Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1005:0 ProgressState 2025-11-03T14:03:47.446071Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1005:0 progress is 1/1 2025-11-03T14:03:47.446074Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-11-03T14:03:47.446078Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1005:0 progress is 1/1 2025-11-03T14:03:47.446080Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-11-03T14:03:47.446083Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1005, ready parts: 1/1, is published: false 2025-11-03T14:03:47.446088Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1005 ready parts: 1/1 2025-11-03T14:03:47.446091Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1005:0 2025-11-03T14:03:47.446094Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1005:0 2025-11-03T14:03:47.446102Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-11-03T14:03:47.446106Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1005, publications: 2, subscribers: 0 2025-11-03T14:03:47.446111Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, 
LocalPathId: 1], 11 2025-11-03T14:03:47.446115Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1005, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-11-03T14:03:47.446194Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.446205Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.446209Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1005 2025-11-03T14:03:47.446212Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-11-03T14:03:47.446215Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-11-03T14:03:47.446259Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-11-03T14:03:47.446263Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-11-03T14:03:47.446269Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-11-03T14:03:47.446286Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.446292Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.446294Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1005 2025-11-03T14:03:47.446297Z node 30 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1005, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-11-03T14:03:47.446300Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:03:47.446305Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1005, subscribers: 0 2025-11-03T14:03:47.447148Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 2025-11-03T14:03:47.447174Z node 30 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-11-03T14:03:47.447183Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1005 TestModificationResult got TxId: 1005, wait until txId: 1005 TestWaitNotification wait txId: 1005 2025-11-03T14:03:47.447231Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1005: send EvNotifyTxCompletion 2025-11-03T14:03:47.447240Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1005 2025-11-03T14:03:47.447317Z node 30 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1005, at schemeshard: 72057594046678944 2025-11-03T14:03:47.447333Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1005: got EvNotifyTxCompletionResult 2025-11-03T14:03:47.447337Z node 30 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1005: satisfy waiter [30:404:2394] TestWaitNotification: OK eventTxId 1005 >> test.py::test[aggregate-group_by_cube_duo--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> test.py::test[pg-tpcds-q69-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-Results] >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> KqpFlipJoin::Inner_1 [GOOD] >> test.py::test[aggregate-group_compact_sorted_distinct_complex--Results] [GOOD] >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] >> KqpFlipJoin::Inner_2 >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> test.py::test[blocks-filter_by_column_with_drop--Results] [GOOD] >> test.py::test[blocks-interval_add_date_scalar--Results] >> TTxDataShardUploadRows::RetryUploadRowsToShard >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink >> test.py::test[produce-process_streaming_count-default.txt-ForceBlocks] 
[GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK >> test.py::test[produce-process_streaming_count-default.txt-Results] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> test.py::test[window-win_multiaggr_tuple-default.txt-Results] [GOOD] >> test.py::test[window-yql-20456-default.txt-ForceBlocks] >> test.py::test[schema-select_field-row_spec-Results] [GOOD] >> test.py::test[schema-select_field-schema-Results] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError >> KqpFlipJoin::Inner_2 [GOOD] >> test.py::test[pg-tpcds-q69-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-ForceBlocks] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink |55.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[pg-select_columnref2-default.txt-Results] [GOOD] |55.9%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[in-in_types_cast-default.txt-Results] [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Inner_2 [GOOD] Test command err: Trying to start YDB, gRPC: 6331, MsgBus: 25296 2025-11-03T14:03:47.043743Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499573253260250:2072];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:47.043866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001dff/r3tmp/tmpVAs69g/pdisk_1.dat 2025-11-03T14:03:47.073333Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:47.083706Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:47.083869Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499573253260216:2081] 1762178627042994 != 1762178627042997 TServer::EnableGrpc on GrpcPort 6331, node 1 2025-11-03T14:03:47.093955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:47.093968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:47.093971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2025-11-03T14:03:47.094012Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25296 TClient is connected to server localhost:25296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:47.150981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:47.151012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:47.152034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:47.152771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:47.163807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:47.181740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:47.200510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:47.211297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:47.373325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:47.444103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573253261864:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.444125Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.444194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573253261874:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.444205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.486468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.494427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.505329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.518293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.531773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.539511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.553927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.568140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.584552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499573253262737:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.584585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573253262742:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.584588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.584636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573253262745:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.584651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.585329Z node 1 :FLAT_TX_SCHEMESHARD W ... Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:48.446065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:48.455519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:48.465849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:48.484701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:48.496935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:48.658966Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:48.839737Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499577146475866:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.839758Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.839800Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499577146475875:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.839805Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.853255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.861584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.869929Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.883783Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.897023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.904061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.911216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.925982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.941828Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499577146476738:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.941848Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.941877Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499577146476744:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.941881Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.941895Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499577146476743:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:48.942449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:48.945000Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499577146476747:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:03:49.043837Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499581441444095:3554] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:49.259797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:49.266847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:49.275308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:49.281996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:49.389725Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK >> test.py::test[join-bush_in_in_in-off-ForceBlocks] [GOOD] >> test.py::test[join-bush_in_in_in-off-Results] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--ForceBlocks] [SKIPPED] >> test.py::test[join-commonjoin_unused_keys--Results] [SKIPPED] >> test.py::test[join-mergejoin_with_table_range--ForceBlocks] [GOOD] >> test.py::test[join-mergejoin_with_table_range--Results] >> test.py::test[join-full_trivial--ForceBlocks] >> test.py::test[window-win_func_with_struct_access_full_access-default.txt-Results] [GOOD] >> test.py::test[window-win_with_cur_row--Results] >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> test.py::test[aggregate-aggregate_distinct_expr-default.txt-Results] [GOOD] >> test.py::test[aggregate-compare_by_nulls-default.txt-Results] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> 
TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish >> TTxDataShardUploadRows::RetryUploadRowsToShard [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees Test command err: Trying to start YDB, gRPC: 3386, MsgBus: 30313 2025-11-03T14:03:46.351342Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499568938103619:2073];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:46.351512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001df5/r3tmp/tmpvTVVGL/pdisk_1.dat 2025-11-03T14:03:46.380424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 3386, node 1 2025-11-03T14:03:46.392182Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:46.392629Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499568938103575:2081] 1762178626350571 != 1762178626350574 2025-11-03T14:03:46.393942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:46.393949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:46.393951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:46.393985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:30313 TClient is connected to server localhost:30313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:46.438537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-11-03T14:03:46.454255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:46.454285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:46.455399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:46.656169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:46.815048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499568938104250:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499568938104261:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499568938104266:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499568938104284:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:46.815806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:46.817884Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499568938104264:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:46.893042Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499568938104319:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:46.940946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:03:46.966589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:46.966679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:46.966722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:46.966741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:46.966754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:46.966772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:46.966791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:46.966812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:46.966830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:46.966846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:46.966863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:46.966879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:46.966897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7568499568938104523:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:46.967868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499568938104522:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:46.967893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499568938104522:2337];tablet_id=72075186224037888;process=TTxInitSchema::Exec ... er_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:46.998684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:46.998688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:46.998690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:46.998697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:46.998699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:46.998705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:46.998708Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:46.999255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:46.999268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:46.999289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:46.999300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:46.999315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:46.999331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:46.999349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:46.999361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:46.999375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:46.999390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:46.999401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:46.999417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:46.999427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499568938104677:2352];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:46.999854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 
2025-11-03T14:03:46.999863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-11-03T14:03:46.999871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-11-03T14:03:46.999875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-11-03T14:03:46.999886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-11-03T14:03:46.999889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-11-03T14:03:46.999896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-11-03T14:03:46.999899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-11-03T14:03:46.999904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-11-03T14:03:46.999907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-11-03T14:03:46.999911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-11-03T14:03:46.999914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-11-03T14:03:46.999927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-11-03T14:03:46.999932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-11-03T14:03:46.999940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-11-03T14:03:46.999943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot;
2025-11-03T14:03:46.999947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17;
2025-11-03T14:03:46.999953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta;
2025-11-03T14:03:46.999962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18;
2025-11-03T14:03:46.999968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2;
2025-11-03T14:03:46.999977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19;
2025-11-03T14:03:46.999981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot;
2025-11-03T14:03:46.999987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20;
2025-11-03T14:03:46.999993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
VERIFY failed (2025-11-03T14:03:46.999333Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed
0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665
1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06
2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2
3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01
4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB
5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789
6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67
7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664
8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794
9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0
10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E
11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC
12. ??:0: ?? @ 0x7FD696B62AC2
13. ??:0: ?? @ 0x7FD696BF48BF
>> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD]
>> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink
>> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore
>> test.py::test[produce-process_streaming_count-default.txt-Results] [GOOD]
>> test.py::test[produce-process_trivial_as_struct-default.txt-ForceBlocks]
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD]
>> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD]
>> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink
>> KqpIndexLookupJoin::LeftSemi+StreamLookup
>> KqpFlipJoin::Right_1
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD]
Test command err:
2025-11-03T14:03:35.756963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown
2025-11-03T14:03:35.797516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-11-03T14:03:35.799846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.799934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.799970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001d62/r3tmp/tmpb9Vh2V/pdisk_1.dat 2025-11-03T14:03:35.877579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:35.877629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:35.890280Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:35.891206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178615269088 != 1762178615269092 2025-11-03T14:03:35.922258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:35.970011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:36.005093Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:225: SessionId: ydb://session/3?node_id=1&id=NGQ1Njc1YjgtZmIyMjQ3NWMtOGRiMWE1YWQtNTQyMjhl, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NGQ1Njc1YjgtZmIyMjQ3NWMtOGRiMWE1YWQtNTQyMjhl (tmp dir name: 0d654a04-413a-426c-a748-71afa3410605) 2025-11-03T14:03:36.005209Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:229: SessionId: ydb://session/3?node_id=1&id=NGQ1Njc1YjgtZmIyMjQ3NWMtOGRiMWE1YWQtNTQyMjhl, ActorId: [1:614:2539], ActorState: unknown state, session actor bootstrapped 2025-11-03T14:03:36.005299Z node 1 :KQP_SESSION DEBUG: kqp_session_actor.cpp:445: SessionId: ydb://session/3?node_id=1&id=NGQ1Njc1YjgtZmIyMjQ3NWMtOGRiMWE1YWQtNTQyMjhl, ActorId: [1:614:2539], ActorState: ReadyState, TraceId: 01k950fjp50pvncxpkaqgn3m9b, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-11-03T14:03:36.035116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:618:2542], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:36.035161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:36.035221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:635:2547], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:36.035227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:36.037123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:36.050497Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:656:2567], Recipient [1:665:2573]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:36.050689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:656:2567], Recipient [1:665:2573]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:36.050734Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:665:2573] 2025-11-03T14:03:36.050769Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:36.051462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:656:2567], Recipient [1:665:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:36.057412Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:36.057444Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:36.057558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:36.057564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:36.057569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:36.057612Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:36.057628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:36.057636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:680:2573] in generation 1 2025-11-03T14:03:36.057686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:36.060642Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:36.060690Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:36.060705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:682:2583] 2025-11-03T14:03:36.060709Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:36.060712Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:36.060716Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:36.060772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:665:2573], Recipient [1:665:2573]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:36.060778Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:36.060842Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:36.060856Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:36.060862Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:36.060867Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:36.060872Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:36.060877Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:36.060880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:36.060884Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:36.060888Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:36.060953Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:666:2574], Recipient [1:665:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:36.060957Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:36.060961Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:661:2570], serverId# [1:666:2574], sessionId# [0:0:0] 2025-11-03T14:03:36.060968Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:666:2574] 2025-11-03T14:03:36.060971Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:36.060986Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:36.061030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:36.061039Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:36.061050Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:36.061063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-11-03T14:03:36.061066Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-11-03T14:03:36.061070Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-11-03T14:03:36.061073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-11-03T14:03:36.061107Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status ... .614568Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-11-03T14:03:50.614576Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-11-03T14:03:50.614581Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-11-03T14:03:50.614584Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-11-03T14:03:50.614587Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:03:50.614589Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:03:50.614593Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v400/18446744073709551615 2025-11-03T14:03:50.614598Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715666] at 72075186224037888 2025-11-03T14:03:50.614601Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-11-03T14:03:50.614603Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:03:50.614605Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715666] at 72075186224037888 to execution unit BlockFailPoint 2025-11-03T14:03:50.614608Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715666] at 72075186224037888 on unit BlockFailPoint 2025-11-03T14:03:50.614610Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-11-03T14:03:50.614612Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BlockFailPoint 2025-11-03T14:03:50.614615Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-11-03T14:03:50.614617Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-11-03T14:03:50.614650Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:263: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-11-03T14:03:50.614662Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 
HasWrites: false 2025-11-03T14:03:50.614675Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:510: add locks to result: 0 2025-11-03T14:03:50.614682Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-11-03T14:03:50.614685Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-11-03T14:03:50.614688Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-11-03T14:03:50.614690Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-11-03T14:03:50.614695Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-11-03T14:03:50.614706Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-11-03T14:03:50.614709Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-11-03T14:03:50.614711Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:03:50.614713Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:03:50.614719Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-11-03T14:03:50.614722Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:03:50.614724Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-11-03T14:03:50.614730Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:50.614733Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-11-03T14:03:50.614737Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:50.614904Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [13:69:2116], Recipient [13:916:2733]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-11-03T14:03:50.631345Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k950g0yqcshs36y523tpzr49, Database: , SessionId: ydb://session/3?node_id=13&id=NGNkYzhmMDEtYTRiMWRkZjMtOGJiODhkODgtZDk1NzIzMzg=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-11-03T14:03:50.631873Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [13:982:2777], Recipient [13:916:2733]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-11-03T14:03:50.631926Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2561: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-11-03T14:03:50.631937Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v400/18446744073709551615 2025-11-03T14:03:50.631945Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2664: 72075186224037888 changed HEAD read to non-repeatable v400/18446744073709551615 2025-11-03T14:03:50.631954Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-11-03T14:03:50.631975Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-11-03T14:03:50.631979Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-11-03T14:03:50.631984Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:03:50.631987Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:03:50.631999Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2025-11-03T14:03:50.632004Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-11-03T14:03:50.632006Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:03:50.632009Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-11-03T14:03:50.632013Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-11-03T14:03:50.632025Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1657: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-11-03T14:03:50.632074Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2272: 72075186224037888 Complete read# {[13:982:2777], 0} after executionsCount# 1 2025-11-03T14:03:50.632080Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2246: 72075186224037888 read iterator# {[13:982:2777], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-11-03T14:03:50.632094Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2297: 72075186224037888 read iterator# {[13:982:2777], 0} 
finished in read 2025-11-03T14:03:50.632103Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-11-03T14:03:50.632106Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-11-03T14:03:50.632109Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:03:50.632112Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:03:50.632128Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037888 is Executed 2025-11-03T14:03:50.632130Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:03:50.632133Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037888 has finished 2025-11-03T14:03:50.632137Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2796: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-11-03T14:03:50.632153Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2845: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-11-03T14:03:50.632333Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [13:982:2777], Recipient [13:916:2733]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-11-03T14:03:50.632344Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3519: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2025-11-03T14:03:48.601321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:48.639384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:48.642082Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:48.642160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:48.642192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0021d5/r3tmp/tmpfE9XHO/pdisk_1.dat 2025-11-03T14:03:48.713080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:48.713122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:48.725072Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:48.726031Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178628180170 != 1762178628180174 2025-11-03T14:03:48.757187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:48.805149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:48.837874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:48.921434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.936475Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:48.936728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:48.936790Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:48.936849Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:48.947372Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:48.947563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:48.947587Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-11-03T14:03:48.947753Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:48.947763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:48.947771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:48.947830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:48.947853Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:48.947867Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:48.958163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:48.963184Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:48.963279Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:48.963305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:48.963311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:48.963318Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:48.963325Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:48.963406Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:48.963414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:48.963528Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:48.963555Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:48.963656Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:48.963668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:48.963676Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:48.963683Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:48.963689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:48.963694Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:48.963700Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-11-03T14:03:48.963714Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:48.963720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:48.963727Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:48.963739Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:48.963745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:48.963767Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:48.963828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:48.963839Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:48.963858Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:48.963867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-11-03T14:03:48.963872Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-11-03T14:03:48.963879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-11-03T14:03:48.963884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-11-03T14:03:48.964005Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-11-03T14:03:48.964012Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-11-03T14:03:48.964016Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-11-03T14:03:48.964021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-11-03T14:03:48.964033Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-11-03T14:03:48.964038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-11-03T14:03:48.964043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-11-03T14:03:48.964048Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-11-03T14:03:48.964054Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-11-03T14:03:48.964360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:674:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-11-03T14:03:48.964370Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:48.974705Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:48.974743Z node 1 :TX_DATASHAR ... KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:313:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:50.479438Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:50.479458Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0021d5/r3tmp/tmpFxz8yh/pdisk_1.dat 2025-11-03T14:03:50.533593Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:50.533633Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:50.536462Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:50.536791Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1762178630091122 != 1762178630091126 2025-11-03T14:03:50.567733Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:50.610743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:50.653261Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:50.726394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:50.739224Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:675:2566] 2025-11-03T14:03:50.739290Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:50.746718Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:50.746753Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:50.746880Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:50.746887Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:50.746892Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:50.746929Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:50.746944Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:50.746954Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:690:2566] in generation 1 2025-11-03T14:03:50.757276Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:50.757308Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:50.757340Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:50.757352Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:692:2576] 2025-11-03T14:03:50.757357Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:50.757361Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:50.757365Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:50.757494Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:50.757515Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:50.757604Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:50.757611Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:50.757619Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:50.757623Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:50.757632Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:672:2564], serverId# [2:677:2567], sessionId# [0:0:0] 2025-11-03T14:03:50.757656Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:50.757708Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:50.757725Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-11-03T14:03:50.758014Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:50.768276Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:50.768315Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-11-03T14:03:50.902346Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:707:2585], serverId# [2:709:2587], sessionId# [0:0:0] 
2025-11-03T14:03:50.902649Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-11-03T14:03:50.902663Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:50.902740Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:50.902752Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-11-03T14:03:50.902763Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000:281474976710657] in PlanQueue unit at 72075186224037888 2025-11-03T14:03:50.902831Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976710657 keys extracted: 0 2025-11-03T14:03:50.902862Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:50.902925Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:50.902939Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-11-03T14:03:50.903058Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:03:50.903136Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:50.903418Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-11-03T14:03:50.903426Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:50.903565Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-11-03T14:03:50.903575Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:50.903782Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:50.903836Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:50.903845Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:50.903851Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-11-03T14:03:50.903868Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:50.903879Z node 2 :TX_DATASHARD INFO: 
datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-11-03T14:03:50.903889Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:50.904264Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-11-03T14:03:50.904278Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:50.904369Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:50.905337Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:743:2613], serverId# [2:744:2614], sessionId# [0:0:0] 2025-11-03T14:03:50.905364Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] Test command err: 2025-11-03T14:03:47.503298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:47.540315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:47.542343Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:47.542406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:47.542434Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002290/r3tmp/tmpsLQbYK/pdisk_1.dat 2025-11-03T14:03:47.614704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:47.614746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:47.627839Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:47.628912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178627060607 != 1762178627060611 2025-11-03T14:03:47.660137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:47.707884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:47.751605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:47.824592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.841955Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:693:2575] 2025-11-03T14:03:47.842025Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:47.854147Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:47.854208Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:47.854413Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:47.854425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:47.854433Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:47.854523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:47.854618Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:695:2577] 2025-11-03T14:03:47.854671Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:47.856316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:47.856340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:733:2575] in generation 1 2025-11-03T14:03:47.856780Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:47.856803Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:47.856945Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:47.856956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:47.856964Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:47.857006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:47.857052Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:47.857062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:744:2577] in generation 1 2025-11-03T14:03:47.857132Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:701:2581] 2025-11-03T14:03:47.857162Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:47.858887Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:704:2584] 2025-11-03T14:03:47.858919Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:47.860490Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:47.860525Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:47.860679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-11-03T14:03:47.860688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2025-11-03T14:03:47.860696Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2025-11-03T14:03:47.860737Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:47.860758Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:47.860769Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:757:2581] in generation 1 2025-11-03T14:03:47.860836Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:47.860851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:47.860966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-11-03T14:03:47.860973Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-11-03T14:03:47.860980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: 
LoadChangeRecordCommits at tablet: 72075186224037890 2025-11-03T14:03:47.861009Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:47.861021Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:47.861028Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:758:2584] in generation 1 2025-11-03T14:03:47.871420Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:47.876657Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:47.876760Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:47.876791Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:763:2617] 2025-11-03T14:03:47.876797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:47.876802Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:47.876810Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:47.876951Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:47.876960Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:47.876971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:47.876979Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:764:2618] 2025-11-03T14:03:47.876984Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:47.876987Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:47.876991Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:47.877112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:47.877120Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2025-11-03T14:03:47.877129Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:47.877138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:765:2619] 2025-11-03T14:03:47.877142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-11-03T14:03:47.877149Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-11-03T14:03:47.877153Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 
2025-11-03T14:03:47.877206Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:47.877234Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:47.877268Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:47.877275Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:47.877286Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:47.877292Z node 1 :TX_DATASHARD DEBUG ... nerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-11-03T14:03:50.872337Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:50.872427Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037889:1][3:722:2596] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 3] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ 
Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 }] } 2025-11-03T14:03:50.872463Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:224: [AsyncIndexChangeSenderMain][72075186224037889:1][3:755:2621] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 
72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 }] } 2025-11-03T14:03:50.872496Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1275: Upload rows: got OK from shard 72075186224037889 description: 2025-11-03T14:03:50.872538Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1405: completed with status SUCCESS >> test.py::test[tpch-q20-default.txt-Results] [GOOD] >> test.py::test[tpch-q21-default.txt-Results] >> test.py::test[insert-yql-14538--ForceBlocks] [GOOD] >> test.py::test[insert-yql-14538--Results] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsDifferentDirs+RemoveLimitOperator Test command err: Trying to start YDB, gRPC: 29063, MsgBus: 16296 2025-11-03T14:03:47.074044Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499576377655583:2071];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:47.074278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e47/r3tmp/tmpbBtPjq/pdisk_1.dat 2025-11-03T14:03:47.107783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:47.121577Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:47.121726Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499576377655543:2081] 1762178627073638 != 1762178627073641 TServer::EnableGrpc on GrpcPort 29063, node 1 2025-11-03T14:03:47.127022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:47.127044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:47.127046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:47.127090Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16296 TClient is connected to server localhost:16296 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:47.177737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:47.177761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:47.178995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:47.182338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:47.362303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:47.417894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499576377656227:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.417901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499576377656219:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.417914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.417973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499576377656233:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.417982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.418702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:47.421191Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499576377656234:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:47.495909Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499576377656286:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:47.547478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.563065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.569647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.581088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.594785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.618579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.626748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.637327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.651073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.657604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.664793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.671464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.679100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
-11-03T14:03:49.029843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:49.029856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:49.029868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:49.029894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:49.029905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:49.029918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:49.029930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:49.029939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:49.029950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:49.029957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:49.029969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:49.030011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:49.030024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:49.030039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:49.030050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:49.030057Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:49.030069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:49.030076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:49.030087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:49.030101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:49.030112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:49.030124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:49.030134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:49.030372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:49.030391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:49.030431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:49.030444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:49.030471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:49.030482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:49.030496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:49.030507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:49.030517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:49.030529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:49.030537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:49.030544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:49.030581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:49.030593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:49.030609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:49.030640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:49.030653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:49.030659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:49.030671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:49.030677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:49.030693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:49.030704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:49.030716Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20;
2025-11-03T14:03:49.030727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
VERIFY failed (2025-11-03T14:03:48.364262Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed
0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665
1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06
2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2
3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01
4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB
5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789
6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67
7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664
8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794
9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0
10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E
11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC
12. ??:0: ?? @ 0x7F67938B3AC2
13. ??:0: ?? @ 0x7F67939458BF
>> TTxDataShardUploadRows::TestUploadShadowRows [GOOD]
>> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup
>> TopicTimestamp::TimestampRead_1MB_Topic_exact_WithRestart [GOOD]
>> TopicTimestamp::TimestampRead_1MB_Topic_offset+middle_WithRestart
>> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD]
>> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Sortings4Year+RemoveLimitOperator
Test command err:
Trying to start YDB, gRPC: 26462, MsgBus: 13942
2025-11-03T14:03:46.654584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499568890468024:2067];send_to=[0:7307199536658146131:7762515];
2025-11-03T14:03:46.654714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001dfc/r3tmp/tmpuNeOdR/pdisk_1.dat
2025-11-03T14:03:46.715750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions
2025-11-03T14:03:46.716061Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded
2025-11-03T14:03:46.716289Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499568890467988:2081] 1762178626654356 != 1762178626654359
TServer::EnableGrpc on GrpcPort 26462, node 1
2025-11-03T14:03:46.722862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-11-03T14:03:46.722875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe)
2025-11-03T14:03:46.722877Z node 1
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:46.722923Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13942 2025-11-03T14:03:46.758361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:46.758389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:46.759412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:46.780267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:46.966306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:47.091484Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573185435960:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.091491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573185435969:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.091511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.091628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499573185435975:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.091654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.092329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:47.094348Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499573185435974:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:47.162318Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499573185436027:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:47.198599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.211411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.217207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.223628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.230301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.251284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.258590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.265634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.272216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.279737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.289698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.301166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.307773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
-11-03T14:03:48.505056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:48.505070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:48.505080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:48.505130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:48.505160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:48.505177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:48.505187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:48.505193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:48.505201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:48.505207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:48.505217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:48.505290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:48.505300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:48.505319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:48.505329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:48.505339Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:48.505348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:48.505359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:48.505368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:48.505386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:48.505396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:48.505412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:48.505421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:48.505968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:48.505986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:48.506000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:48.506004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:48.506063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:48.506075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:48.506087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:48.506093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:48.506105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:48.506128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:48.506141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:48.506151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:48.506232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:48.506244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:48.506258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:48.506267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:48.506273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:48.506278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:48.506283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:48.506288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:48.506300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:48.506304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:48.506315Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:48.506319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:47.956336Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FAA0E892AC2 13. ??:0: ?? @ 0x7FAA0E9248BF ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsSimpleOrderByAliasIndexDesc+RemoveLimitOperator Test command err: Trying to start YDB, gRPC: 12342, MsgBus: 8206 2025-11-03T14:03:46.696386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499571485107410:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:46.696772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001dfe/r3tmp/tmp54WgKA/pdisk_1.dat 2025-11-03T14:03:46.747330Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:46.747360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:46.747540Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499571485107383:2081] 1762178626696065 != 1762178626696068 TServer::EnableGrpc on GrpcPort 12342, node 1 2025-11-03T14:03:46.755531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:46.755553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:46.755556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:46.755614Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:8206 2025-11-03T14:03:46.803008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-11-03T14:03:46.803047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:46.804173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:46.818594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:46.987499Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:47.056619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499575780075368:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.056642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499575780075358:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.056657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.056727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499575780075373:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.056744Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:47.057425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:47.059272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499575780075372:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:47.123897Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499575780075425:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:47.182460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.196815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.203299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.209926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.216830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.245511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.252232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.265757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.272440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.279739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.292812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.301565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:47.315070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself ... 
-11-03T14:03:48.484482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:48.484496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:48.484501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:48.484521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:48.484533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:48.484539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:48.484542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:48.484548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:48.484550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:48.484555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:48.484557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:48.484575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:48.484582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:48.484592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:48.484598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:48.484601Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:48.484604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:48.484607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:48.484610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:48.484616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:48.484619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:48.484625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:48.484644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:48.484927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:48.484943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:48.484950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:48.484953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:48.484969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:48.484976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:48.484982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:48.484985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:48.484990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:48.485000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:48.485006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:48.485010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:48.485044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:48.485053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:48.485063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:48.485066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:48.485069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:48.485072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:48.485075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:48.485077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:48.485084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:48.485087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:48.485093Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:48.485095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:48.022398Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FD747383AC2 13. ??:0: ?? @ 0x7FD7474158BF ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] Test command err: 2025-11-03T14:03:48.550248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:48.575363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:48.577677Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:48.577765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:48.577803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002183/r3tmp/tmp2MFOKb/pdisk_1.dat 2025-11-03T14:03:48.647132Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:48.647163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:48.655146Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:48.655794Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178628186996 != 1762178628187000 2025-11-03T14:03:48.686669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:48.735293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:48.779136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:48.852700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.869583Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:693:2575] 2025-11-03T14:03:48.869654Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:48.878340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:48.878384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:48.878515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:48.878523Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:48.878528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:48.878573Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:48.878658Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: 
TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:695:2577] 2025-11-03T14:03:48.878706Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:48.879753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:48.879770Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:733:2575] in generation 1 2025-11-03T14:03:48.880194Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:48.880234Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:48.880337Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:48.880345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:48.880352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:48.880392Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:48.880457Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:48.880467Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:744:2577] in generation 1 2025-11-03T14:03:48.880540Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:701:2581] 2025-11-03T14:03:48.880573Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:48.882037Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:704:2584] 2025-11-03T14:03:48.882068Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:48.883447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:48.883509Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:48.883654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-11-03T14:03:48.883664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2025-11-03T14:03:48.883671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2025-11-03T14:03:48.883709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:48.883727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:48.883736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:757:2581] in generation 1 2025-11-03T14:03:48.883789Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:48.883800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:48.883897Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-11-03T14:03:48.883904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2025-11-03T14:03:48.883908Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: 
LoadChangeRecordCommits at tablet: 72075186224037890 2025-11-03T14:03:48.883926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:48.883936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:48.883942Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:758:2584] in generation 1 2025-11-03T14:03:48.894326Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:48.898187Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:48.898270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:48.898296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:763:2617] 2025-11-03T14:03:48.898302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:48.898308Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:48.898313Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:48.898455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:48.898464Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:48.898475Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:48.898484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:764:2618] 2025-11-03T14:03:48.898488Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:48.898492Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:48.898496Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:48.898602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:48.898608Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2025-11-03T14:03:48.898618Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:48.898644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:765:2619] 2025-11-03T14:03:48.898648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2025-11-03T14:03:48.898655Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-11-03T14:03:48.898659Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 
2025-11-03T14:03:48.898712Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:48.898735Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:48.898769Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:48.898776Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:48.898784Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:48.898789Z node 1 :TX_DATASHARD DEBUG ... 037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-11-03T14:03:51.762023Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:03:51.762116Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:51.762474Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-11-03T14:03:51.762485Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:51.762597Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-11-03T14:03:51.762610Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:51.762866Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:51.762879Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:51.762886Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2025-11-03T14:03:51.762904Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:398:2397], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:51.762916Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-11-03T14:03:51.762928Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:51.763242Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:51.763594Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-11-03T14:03:51.763609Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-11-03T14:03:51.763775Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 
coordinator 72057594046316545 last step 0 next step 1000 2025-11-03T14:03:51.765529Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:741:2611], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.765553Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:751:2616], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.765563Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.765739Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:756:2620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.765763Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.766704Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:51.767762Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:51.809005Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:51.902715Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:51.903216Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:755:2619], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:51.935108Z node 3 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [3:827:2660] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:51.948770Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715660. Ctx: { TraceId: 01k950g22n0b4nq32g46m3021w, Database: , SessionId: ydb://session/3?node_id=3&id=NWEyZGJiNDEtMTI1M2ZhZjQtYTA5MjQzZDYtNWFkMTQ4MmM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:51.949396Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:858:2677], serverId# [3:859:2678], sessionId# [0:0:0] 2025-11-03T14:03:51.949531Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:2] at 72075186224037888 2025-11-03T14:03:51.949573Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:2] at 72075186224037888, row count=3 2025-11-03T14:03:51.960047Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:51.969585Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715661. Ctx: { TraceId: 01k950g28s5c58krmkpma80efn, Database: , SessionId: ydb://session/3?node_id=3&id=ZWIxY2RiOTktOGM5MDg2ZC1iZDExYzI4YS1lOGM4YzhmNQ==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:51.970090Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:3] at 72075186224037888 2025-11-03T14:03:51.970129Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:3] at 72075186224037888, row count=3 2025-11-03T14:03:51.980559Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:51.993511Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k950g29d6qv0ggjjraas0bxt, Database: , SessionId: ydb://session/3?node_id=3&id=NjQxMzc2ZmUtYjcxNGY3YzQtNGNhZmZlODUtYjM2MTUzMTE=, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root 2025-11-03T14:03:51.994166Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:4] at 72075186224037888 2025-11-03T14:03:51.994219Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:457: Executed write operation for [0:4] at 72075186224037888, row count=3 2025-11-03T14:03:52.004671Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:52.005679Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-11-03T14:03:52.016165Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-11-03T14:03:52.016204Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:52.016704Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:52.016822Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:52.016879Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:52.016890Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:03:52.016898Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for WaitForStreamClearance 2025-11-03T14:03:52.016946Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:03:52.016959Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:52.017179Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 1 2025-11-03T14:03:52.017271Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-11-03T14:03:52.017312Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715663, PendingAcks: 0 2025-11-03T14:03:52.017320Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 0 2025-11-03T14:03:52.017446Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2025-11-03T14:03:52.017454Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037888 2025-11-03T14:03:52.017494Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:52.017500Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:03:52.017507Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for ReadTableScan 2025-11-03T14:03:52.017718Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:52.017733Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:52.017741Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> test.py::test[join-mergejoin_with_table_range--Results] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--ForceBlocks] >> test.py::test[join-join_cbo_3_tables--Results] [GOOD] >> test.py::test[join-join_comp_common_table--Results] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> test.py::test[aggregate-group_by_expr_order_by_expr--Results] [GOOD] >> test.py::test[aggregate-group_by_gs_grouping--Results] >> KqpJoinOrder::TPCDS16-ColumnStore >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> KqpIndexLookupJoin::LeftSemi+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemi-StreamLookup >> test.py::test[pg-tpcds-q76-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q76-default.txt-Results] >> test.py::test[schema-select_field-schema-Results] [GOOD] >> test.py::test[schema-user_schema_append--Results] >> test.py::test[insert-yql-14538--Results] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--ForceBlocks] >> test.py::test[window-full/aggregations_leadlag_compact--Results] [GOOD] >> test.py::test[blocks-date_greater_scalar--ForceBlocks] [GOOD] >> test.py::test[window-full/session_incompat_sort--Results] >> test.py::test[blocks-date_greater_scalar--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: 2025-11-03T14:03:48.133871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:48.166858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:48.168353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:48.168402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:48.168423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002274/r3tmp/tmpEVeAyu/pdisk_1.dat 2025-11-03T14:03:48.237855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:48.237888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:48.245968Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:48.246665Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178627693610 != 1762178627693614 2025-11-03T14:03:48.277666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:48.324155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:48.367050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:48.440182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:48.455465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:48.455708Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:48.455771Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:48.455843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:48.466003Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:48.466202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:48.466231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-11-03T14:03:48.466445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:48.466458Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:48.466466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:48.466529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:48.466554Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:48.466569Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:48.476831Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:48.480260Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:48.480328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:48.480350Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:48.480354Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:48.480358Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:48.480362Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:48.480421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:48.480426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:48.480505Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:48.480519Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:48.480584Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:48.480592Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:48.480597Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:48.480601Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:48.480604Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:48.480607Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:48.480611Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-11-03T14:03:48.480620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:48.480623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:48.480628Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:48.480636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:48.480639Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:48.480654Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:48.480695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:48.480703Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:48.480714Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:48.480720Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-11-03T14:03:48.480723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-11-03T14:03:48.480727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-11-03T14:03:48.480732Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-11-03T14:03:48.480824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-11-03T14:03:48.480828Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-11-03T14:03:48.480831Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-11-03T14:03:48.480834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-11-03T14:03:48.480841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-11-03T14:03:48.480844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-11-03T14:03:48.480848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-11-03T14:03:48.480850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-11-03T14:03:48.480854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-11-03T14:03:48.481049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:674:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-11-03T14:03:48.481055Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:03:48.491343Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:03:48.491386Z node 1 :TX_DATASHAR ... line.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2025-11-03T14:03:52.706883Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-11-03T14:03:52.706887Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2025-11-03T14:03:52.706891Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompletedOperations 2025-11-03T14:03:52.706898Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-11-03T14:03:52.706904Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-11-03T14:03:52.706909Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2025-11-03T14:03:52.706914Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:52.706917Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2025-11-03T14:03:52.706921Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-11-03T14:03:52.706925Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2025-11-03T14:03:52.717342Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:52.717375Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2025-11-03T14:03:52.717387Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-11-03T14:03:52.717410Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1148:2916], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:52.717425Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:52.717495Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted 
TabletId# 72075186224037890 step# 3500} 2025-11-03T14:03:52.717507Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:52.717512Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:52.717642Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287940, Sender [3:1148:2916], Recipient [3:974:2767]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-11-03T14:03:52.717651Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-11-03T14:03:52.717762Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [3:974:2767], Recipient [3:974:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:52.717768Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:52.717778Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-11-03T14:03:52.717786Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:52.717795Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-11-03T14:03:52.717801Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-11-03T14:03:52.717810Z node 3 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [3500:281474976715668] at 72075186224037890 2025-11-03T14:03:52.717819Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-11-03T14:03:52.717828Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-11-03T14:03:52.717835Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-11-03T14:03:52.717840Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-11-03T14:03:52.717900Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2025-11-03T14:03:52.717906Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:52.717912Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2025-11-03T14:03:52.717918Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-11-03T14:03:52.717923Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2025-11-03T14:03:52.718057Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435082, Sender [3:1168:2934], Recipient [3:974:2767]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-11-03T14:03:52.718065Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-11-03T14:03:52.718135Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-11-03T14:03:52.718301Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-11-03T14:03:52.718333Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-11-03T14:03:52.718341Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-11-03T14:03:52.718437Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2025-11-03T14:03:52.718443Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037890 2025-11-03T14:03:52.718460Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [3:974:2767], Recipient [3:974:2767]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:52.718465Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:52.718473Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2025-11-03T14:03:52.718478Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-11-03T14:03:52.718484Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2025-11-03T14:03:52.718489Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-11-03T14:03:52.718495Z node 3 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-11-03T14:03:52.718503Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-11-03T14:03:52.718510Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit ReadTableScan 2025-11-03T14:03:52.718516Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-11-03T14:03:52.718521Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-11-03T14:03:52.718574Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2025-11-03T14:03:52.718578Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-11-03T14:03:52.718583Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [3500:281474976715668] at 
72075186224037890 to execution unit CompletedOperations 2025-11-03T14:03:52.718588Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-11-03T14:03:52.718595Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-11-03T14:03:52.718599Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-11-03T14:03:52.718604Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2025-11-03T14:03:52.718608Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:52.718612Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2025-11-03T14:03:52.718615Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-11-03T14:03:52.718619Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2025-11-03T14:03:52.729651Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:52.729684Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2025-11-03T14:03:52.729696Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-11-03T14:03:52.729716Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1148:2916], exec latency: 0 ms, propose latency: 1 ms 2025-11-03T14:03:52.729729Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown >> KqpJoinOrder::SortingsWithLookupJoin3+RemoveLimitOperator >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1+EnableSeparationComputeActorsFromRead >> KqpJoin::IdxLookupPartialWithTempTable >> test.py::test[window-yql-20456-default.txt-ForceBlocks] [GOOD] >> test.py::test[window-yql-20456-default.txt-Results] >> KqpFlipJoin::Right_2 [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Right_2 [GOOD] Test command err: Trying to start YDB, gRPC: 9445, MsgBus: 10488 2025-11-03T14:03:51.469796Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499590528872521:2075];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:51.469921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ud3d/001e3b/r3tmp/tmpQWYDYP/pdisk_1.dat 2025-11-03T14:03:51.519191Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:51.519346Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:51.519433Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499590528872484:2081] 1762178631469023 != 1762178631469026 TServer::EnableGrpc on GrpcPort 9445, node 1 2025-11-03T14:03:51.527424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:51.527440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:51.527442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:51.527496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10488 TClient is connected to server localhost:10488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:51.572277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:51.572308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:51.573448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:51.578229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-11-03T14:03:51.584736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.603520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.629635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.643263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.848436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499590528874129:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.848466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.848546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499590528874139:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.848559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.900240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.907481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.921609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.935594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.949536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.957696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.970774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.986066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.003366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499594823842297:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.003395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.003533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499594823842302:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.003542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499594823842303:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.003562Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.004777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called a ... Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:52.767816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:52.777419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.789788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.813732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.827034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.848827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:53.112319Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499600328442881:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.112354Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.112491Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499600328442891:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.112516Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.119321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.126687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.140063Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.153730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.167740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.181935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.196015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.210659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.226188Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499600328443753:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.226215Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499600328443758:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.226215Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.226268Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499600328443761:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.226280Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.227020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:53.237121Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499600328443760:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:03:53.335310Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499600328443814:3553] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:53.590800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.598645Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.608433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.623085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.696908Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> test.py::test[pg-tpcds-q76-default.txt-Results] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-ForceBlocks] >> test.py::test[produce-process_trivial_as_struct-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] >> test.py::test[blocks-interval_add_date_scalar--Results] [GOOD] >> test.py::test[blocks-interval_mul_scalar--Results] >> KqpJoinOrder::TestJoinHint2-ColumnStore >> test.py::test[join-full_trivial--ForceBlocks] [GOOD] >> test.py::test[join-full_trivial--Results] >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin >> test.py::test[schema-user_schema_append--Results] [GOOD] >> 
test.py::test[select-column_labels-default.txt-Results] >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> CommitOffset::Commit_Flat_WithWrongSession >> test.py::test[insert_monotonic-break_sort_fail--ForceBlocks] [GOOD] >> test.py::test[insert_monotonic-break_sort_fail--Results] [GOOD] >> test.py::test[insert_monotonic-keep_unique--ForceBlocks] [SKIPPED] >> KqpIndexLookupJoin::LeftSemi-StreamLookup [GOOD] >> test.py::test[insert_monotonic-keep_unique--Results] [SKIPPED] >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] >> CommitOffset::Commit_WithoutSession_ParentNotFinished [GOOD] >> CommitOffset::Commit_WithoutSession_ToPastParentPartition >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] >> KqpJoin::IdxLookupSelf >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemi-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 5506, MsgBus: 21284 2025-11-03T14:03:51.429860Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499592422706256:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:51.430107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e3e/r3tmp/tmp0S7HBJ/pdisk_1.dat 2025-11-03T14:03:51.471212Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:51.471742Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:51.471894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499592422706228:2081] 1762178631429551 != 1762178631429554 TServer::EnableGrpc on GrpcPort 5506, node 1 2025-11-03T14:03:51.481938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:51.481957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:51.481959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:51.482010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21284 TClient is connected to server localhost:21284 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:51.533373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:51.533398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:51.534534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:51.540587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:51.560396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.581198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.598428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:51.608614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:51.758348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:51.829121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499592422707875:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.829159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.829265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499592422707885:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.829277Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.879625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.887079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.900207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.914747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.928640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.942928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.957208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.971212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.987709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499592422708746:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.987738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.987746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499592422708752:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.987774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499592422708754:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.987781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.988574Z node 1 :FLAT_TX_SCHEMESHARD W ... 594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.084686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.098257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.112568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.118574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.125912Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.133125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.149387Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.170161Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:54.216620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.224359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($35) (FromFlow (Filter (ToFlow $35) (lambda '($36) (And (Exists (Member $36 '"Fk")) (Coalesce (!= (Member $36 '"Value") (String '"Value1")) (Bool 'false))))))) '('('"_logical_id" '1055) '('"_id" '"c45137b9-51ac0917-f07b56df-263b929f")))) (let $5 (DqCnUnionAll (TDqOutput $4 '"0"))) (let $6 (DqPhyStage '($5) (lambda '($37) $37) '('('"_logical_id" '1474) '('"_id" '"a7e9ea40-4690c8f1-89216676-5b1862e3")))) (let $7 (DqCnResult (TDqOutput $6 '"0") '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($4 $6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (OptionalType (DataType 'Int32))) (let $12 '('"Key" $11)) (let $13 (OptionalType (DataType 'String))) (let $14 (ListType (StructType '('"Fk" $11) $12 '('"Value" $13)))) (let $15 %kqp%tx_result_binding_0_0) (let $16 '('('"_logical_id" '1110) '('"_id" '"4d020606-b4e518e9-4f5df4e8-4162dc16") '('"_partition_mode" '"single"))) (let $17 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $15 (lambda '($38) (Member $38 '"Fk")) (Void) (Void) (lambda '($39) (Map (Filter (FlatMap $39 (lambda '($40) (Map (Take (Nth $40 '1) (Uint64 '1)) (lambda '($41) (AsStruct '('"Fk" (Member $41 '"Fk"))))))) (lambda '($42) (Exists (Member $42 '"Fk")))) (lambda '($43) (AsStruct '('"Key" (Member $43 '"Fk"))))))))) $16)) (let $18 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $19 '('"Key")) (let $20 (StructType $12)) (let $21 (KqpCnStreamLookup (TDqOutput $17 '"0") $18 $19 (ListType $20) '('('"Strategy" '"LookupRows")))) (let $22 '('('"_logical_id" '1204) '('"_id" '"6e56750c-b5520cb-c5b98e12-678d8343") '('"_wide_channels" $20))) (let $23 (DqPhyStage '($21) (lambda '($44) (block '( (let $45 (lambda '($46) (block '( (let $47 (Member $46 '"Key")) (return $47 (Exists $47)) )))) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $44) $45) (lambda '($48 $49) $49)) (lambda '($50 $51) $50)))) ))) $22)) (let $24 (DqCnBroadcast (TDqOutput $23 '"0"))) (let $25 (StructType '('"l.Fk" $11) '('"l.Key" $11) '('"l.Value" $13))) (let $26 '('('"_logical_id" '1353) '('"_id" '"bcbe269b-87888ec7-6e467573-91953807") '('"_wide_channels" $25))) (let $27 (DqPhyStage '($24) (lambda '($52) (block '( (let $53 '('One 'Hashed 'Compact)) (let $54 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $52) (lambda '($57) (Exists $57))) (lambda '($58) $58)) (lambda '($59) $59) (lambda '($60) (Void)) $53)) (let $55 (Sort (FlatMap $54 (lambda '($61) (block '( (let $62 '('"Fk" '"l.Fk" '"Key" 
'"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $15) (lambda '($63) (Exists (Member $63 '"Fk")))) $61 '"LeftSemi" '('"Fk") $19 $62 '() '('"l.Fk") '('"r.Key"))) )))) (Bool 'true) (lambda '($64) (Member $64 '"l.Key")))) (let $56 (lambda '($65) (Member $65 '"l.Fk") (Member $65 '"l.Key") (Member $65 '"l.Value"))) (return (FromFlow (ExpandMap $55 $56))) ))) $26)) (let $28 (DqCnMerge (TDqOutput $27 '"0") '('('1 '"Asc")))) (let $29 (DqPhyStage '($28) (lambda '($66) (FromFlow (NarrowMap (ToFlow $66) (lambda '($67 $68 $69) (AsStruct '('"l.Fk" $67) '('"l.Key" $68) '('"l.Value" $69)))))) '('('"_logical_id" '1365) '('"_id" '"7246b7c2-9f1a8dba-ae1959f8-60677420")))) (let $30 '($17 $23 $27 $29)) (let $31 '('"l.Key" '"l.Fk" '"l.Value")) (let $32 (DqCnResult (TDqOutput $29 '"0") $31)) (let $33 (KqpTxResultBinding $14 '"0" '"0")) (let $34 (KqpPhysicalTx $30 '($32) '('($10 $33)) $8)) (return (KqpPhysicalQuery '($9 $34) '((KqpTxResultBinding (ListType $25) '1 '"0")) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($36) (FromFlow (Filter (ToFlow $36) (lambda '($37) (And (Exists (Member $37 '"Fk")) (Coalesce (!= (Member $37 '"Value") (String '"Value1")) (Bool 'false))))))) '('('"_logical_id" '1086) '('"_id" '"5c92dfb2-c33087a0-54ccb4f-e6ae612e")))) (let $5 (DqCnUnionAll (TDqOutput $4 '0))) (let $6 (DqPhyStage '($5) (lambda '($38) $38) '('('"_logical_id" '1507) '('"_id" '"7372e9bd-7d1b0138-469da28f-1bc677f7")))) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($4 $6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (OptionalType (DataType 'Int32))) (let $12 '('"Key" $11)) (let $13 (OptionalType (DataType 'String))) (let $14 (ListType (StructType '('"Fk" $11) $12 '('"Value" $13)))) (let $15 %kqp%tx_result_binding_0_0) (let $16 '('('"_logical_id" '1141) '('"_id" '"bb75a239-6a3a0e2c-57d5e0d0-7b00d8a3") '('"_partition_mode" '"single"))) (let $17 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $15 (lambda '($39) (Member $39 '"Fk")) (Void) (Void) (lambda '($40) (Map (Filter (FlatMap $40 (lambda '($41) (Map (Take (Nth $41 '1) (Uint64 '1)) (lambda '($42) (AsStruct '('"Fk" (Member $42 '"Fk"))))))) (lambda '($43) (Exists (Member $43 '"Fk")))) (lambda '($44) (AsStruct '('"Key" (Member $44 '"Fk"))))))))) $16)) (let $18 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $19 '('"Key")) (let $20 (StructType $12)) (let $21 (KqpCnStreamLookup (TDqOutput $17 '0) $18 $19 (ListType $20) '('('"Strategy" '"LookupRows")))) (let $22 '('('"_logical_id" '1235) '('"_id" '"3a8fded7-7bd9cff5-69cefa56-783384e7") '('"_wide_channels" $20))) (let $23 (DqPhyStage '($21) (lambda '($45) (block '( (let $46 (lambda '($47) (block '( (let $48 (Member $47 '"Key")) (return $48 (Exists $48)) )))) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $45) $46) (lambda '($49 $50) $50)) (lambda '($51 $52) $51)))) ))) $22)) (let $24 (DqCnBroadcast (TDqOutput $23 '0))) (let $25 (Uint64 '"1001")) (let $26 (StructType '('"l.Fk" $11) '('"l.Key" $11) '('"l.Value" $13))) (let $27 '('('"_logical_id" '1384) '('"_id" 
'"674c62a8-65b8666a-c01548de-5c832ebc") '('"_wide_channels" $26))) (let $28 (DqPhyStage '($24) (lambda '($53) (block '( (let $54 '('One 'Hashed 'Compact)) (let $55 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $53) (lambda '($58) (Exists $58))) (lambda '($59) $59)) (lambda '($60) $60) (lambda '($61) (Void)) $54)) (let $56 (TopSort (FlatMap $55 (lambda '($62) (block '( (let $63 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $15) (lambda '($64) (Exists (Member $64 '"Fk")))) $62 '"LeftSemi" '('"Fk") $19 $63 '() '('"l.Fk") '('"r.Key"))) )))) $25 (Bool 'true) (lambda '($65) (Member $65 '"l.Key")))) (let $57 (lambda '($66) (Member $66 '"l.Fk") (Member $66 '"l.Key") (Member $66 '"l.Value"))) (return (FromFlow (ExpandMap $56 $57))) ))) $27)) (let $29 (DqCnMerge (TDqOutput $28 '0) '('('1 '"Asc")))) (let $30 (DqPhyStage '($29) (lambda '($67) (FromFlow (NarrowMap (Take (ToFlow $67) $25) (lambda '($68 $69 $70) (AsStruct '('"l.Fk" $68) '('"l.Key" $69) '('"l.Value" $70)))))) '('('"_logical_id" '1397) '('"_id" '"52561005-88238c4-b5489b9b-3730caa2")))) (let $31 '($17 $23 $28 $30)) (let $32 '('"l.Key" '"l.Fk" '"l.Value")) (let $33 (DqCnResult (TDqOutput $30 '0) $32)) (let $34 (KqpTxResultBinding $14 '0 '0)) (let $35 (KqpPhysicalTx $31 '($33) '('($10 $34)) $8)) (return (KqpPhysicalQuery '($9 $35) '((KqpTxResultBinding (ListType $26) '1 '0)) '('('"type" '"data_query")))) ) >> test.py::test[window-yql-20456-default.txt-Results] [GOOD] >> test.py::test[ypath-multi_key-default.txt-ForceBlocks] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 >> KqpJoinOrder::ShuffleEliminationOneJoin-EnableSeparationComputeActorsFromRead >> test.py::test[produce-process_trivial_as_struct-default.txt-Results] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-11-03T14:02:57.954445Z :WriteRAW INFO: Random seed for debugging is 1762178577954432 2025-11-03T14:02:58.444604Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499363439440243:2172];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:02:58.444665Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002df4/r3tmp/tmp184l5H/pdisk_1.dat 2025-11-03T14:02:58.445207Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:02:58.447384Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:02:58.538957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:02:58.539045Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:02:58.541278Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:02:58.554438Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7568499363439440099:2073] 1762178578438255 != 1762178578438252 2025-11-03T14:02:58.554900Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:02:58.565026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:02:58.565064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:02:58.565799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:02:58.565818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:02:58.574264Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:02:58.574309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:02:58.575124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5586, node 1 2025-11-03T14:02:58.614962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/002df4/r3tmp/yandexXr9uSz.tmp 2025-11-03T14:02:58.614987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/002df4/r3tmp/yandexXr9uSz.tmp 2025-11-03T14:02:58.615079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/002df4/r3tmp/yandexXr9uSz.tmp 2025-11-03T14:02:58.615115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:02:58.618335Z INFO: TTestServer started on Port 61652 GrpcPort 5586 TClient is connected to server localhost:61652 PQClient connected to localhost:5586 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:02:58.682439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... waiting... waiting... 2025-11-03T14:02:58.835225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:02:58.847012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:02:59.087712Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499367734407772:2299], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.087747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499367734407761:2296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.087780Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.088331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499367734407776:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.088351Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.097497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499370647264229:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.097528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.097700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499370647264256:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.097707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499370647264257:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.097723Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:02:59.098527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:02:59.103934Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499367734407777:2129] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:02:59.134741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499370647264260:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-11-03T14:02:59.134924Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499367734407775:2300], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-11-03T14:02:59.167881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:02:59.203067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:02:59.222015Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499370647264571:2829] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:02:59.243761Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilati ... rsisted=0 Status=SUCCESS 2025-11-03T14:03:54.660726Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [15:7568499604686174317:2472] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-11-03T14:03:54.660728Z node 15 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [15:7568499604686174317:2472] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-11-03T14:03:54.660735Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-11-03T14:03:54.660847Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 15, Generation: 1 2025-11-03T14:03:54.660861Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2721: [PQ: 72075186224037892] server connected, pipe [15:7568499604686174376:2472], now have 1 active actors on pipe 2025-11-03T14:03:54.660866Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-11-03T14:03:54.660870Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2635: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-11-03T14:03:54.660887Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-11-03T14:03:54.660898Z node 15 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:54.660902Z node 15 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:54.660903Z node 15 :PERSQUEUE DEBUG: partition.cpp:2302: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2025-11-03T14:03:54.660911Z node 15 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:54.660927Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:35: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-11-03T14:03:54.660944Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-11-03T14:03:54.660975Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-11-03T14:03:54.660980Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2635: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-11-03T14:03:54.660988Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-11-03T14:03:54.661017Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0 2025-11-03T14:03:54.661329Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1762178634661 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:03:54.661357Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-11-03T14:03:54.661450Z :INFO: [] MessageGroupId [src] SessionId [src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0] Write session: close. 
Timeout = 0 ms 2025-11-03T14:03:54.661459Z :INFO: [] MessageGroupId [src] SessionId [src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0] Write session will now close 2025-11-03T14:03:54.661463Z :DEBUG: [] MessageGroupId [src] SessionId [src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0] Write session: aborting 2025-11-03T14:03:54.661567Z :INFO: [] MessageGroupId [src] SessionId [src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0] Write session: gracefully shut down, all writes complete 2025-11-03T14:03:54.661571Z :DEBUG: [] MessageGroupId [src] SessionId [src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0] Write session: destroy 2025-11-03T14:03:54.661678Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0 grpc read done: success: 0 data: 2025-11-03T14:03:54.661690Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0 grpc read failed 2025-11-03T14:03:54.661697Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0 grpc closed 2025-11-03T14:03:54.661699Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|5a64320f-91c6ed0f-2a9ee2ee-ab463a1c_0 is DEAD 2025-11-03T14:03:54.661824Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-11-03T14:03:54.661864Z node 15 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037892] server disconnected, pipe [15:7568499604686174376:2472] destroyed 2025-11-03T14:03:54.661876Z node 15 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2025-11-03T14:03:54.661885Z node 15 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:54.661887Z node 15 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:54.661889Z node 15 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:54.665427Z :INFO: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Starting read session 2025-11-03T14:03:54.665435Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Starting cluster discovery 2025-11-03T14:03:54.665458Z :INFO: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25953: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25953
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25953. " 2025-11-03T14:03:54.665462Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Restart cluster discovery in 0.007123s 2025-11-03T14:03:54.672806Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Starting cluster discovery 2025-11-03T14:03:54.672888Z :INFO: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25953: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25953
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25953. " 2025-11-03T14:03:54.672894Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Restart cluster discovery in 0.016628s 2025-11-03T14:03:54.689817Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Starting cluster discovery 2025-11-03T14:03:54.689874Z :INFO: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25953: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25953
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25953. " 2025-11-03T14:03:54.689880Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Restart cluster discovery in 0.031535s 2025-11-03T14:03:54.715058Z node 15 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:54.715079Z node 15 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:54.715082Z node 15 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:54.721812Z :DEBUG: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Starting cluster discovery 2025-11-03T14:03:54.721918Z :NOTICE: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25953: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25953
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25953. " } 2025-11-03T14:03:54.721994Z :NOTICE: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25953: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25953
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:25953. " } 2025-11-03T14:03:54.722031Z :INFO: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Closing read session. Close timeout: 0.000000s 2025-11-03T14:03:54.722041Z :NOTICE: [/Root] [/Root] [eec2f572-1ece15ab-55b79ecb-4c7747c6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-11-03T14:03:54.815375Z node 15 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:54.815393Z node 15 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:54.815400Z node 15 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:54.915749Z node 15 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:54.915768Z node 15 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:54.915772Z node 15 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:03:55.016098Z node 15 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:03:55.016122Z node 15 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:03:55.016125Z node 15 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner--Results] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 30652, MsgBus: 22828 2025-11-03T14:03:52.280282Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499597197538847:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:52.280418Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e37/r3tmp/tmpLYqIGQ/pdisk_1.dat 2025-11-03T14:03:52.317893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:52.318433Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:52.318611Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499597197538819:2081] 1762178632279979 != 1762178632279982 TServer::EnableGrpc on GrpcPort 30652, node 1 2025-11-03T14:03:52.328416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:52.328426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:52.328428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:52.328459Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22828 TClient is connected to server localhost:22828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:52.383887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:52.383916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:52.384933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:52.391680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:52.398395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.416982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.437225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:52.448818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:52.546303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:52.641069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499597197540464:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.641097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.641180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499597197540474:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.641189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.710848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.718847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.727329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.741088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.754726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.768412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.782920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.797041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:52.812471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499597197541337:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.812496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499597197541342:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.812499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.812541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499597197541345:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.812548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:52.813247Z node 1 :FLAT_TX_SCHEMESHARD ... uboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.819609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.834010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.847832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.861210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.875410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.889470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.903597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.918040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.932508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.946809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.976903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.985641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.039008Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Fk" '"Value") '() (Void) '())) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1124) '('"_id" '"31b49a87-8ba68ac7-ff018a1e-8c395927") $3)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($32) $32) $4)) (let $6 (DqCnResult (TDqOutput $5 '"0") '())) (let $7 '('('"type" '"generic"))) (let $8 (KqpPhysicalTx '($5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (OptionalType (DataType 'Int32))) (let $11 (OptionalType (DataType 'String))) (let $12 (ListType (StructType '('"Fk" $10) '('"Value" $11)))) (let $13 %kqp%tx_result_binding_0_0) (let $14 '('('"_logical_id" '818) '('"_id" '"ecddf29f-2e12ce9f-4498822d-f5394723") $3)) (let $15 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $13 (lambda '($33) (Member $33 '"Fk")) (Void) (Void) (lambda '($34) (Map (Filter (FlatMap $34 (lambda '($35) (Map (Take (Nth $35 '1) (Uint64 '1)) (lambda '($36) (AsStruct '('"Fk" (Member $36 '"Fk"))))))) (lambda '($37) (Exists (Member $37 '"Fk")))) (lambda '($38) (AsStruct '('"Key" (Member $38 '"Fk"))))))))) $14)) (let $16 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $17 '('"Key")) (let $18 (StructType '('"Key" $10))) (let $19 (KqpCnStreamLookup (TDqOutput $15 '"0") $16 $17 (ListType $18) '('('"Strategy" 
'"LookupRows")))) (let $20 '('('"_logical_id" '902) '('"_id" '"aaed2745-bb43eb00-ddb2ce75-4a42184") '('"_wide_channels" $18))) (let $21 (DqPhyStage '($19) (lambda '($39) (block '( (let $40 (lambda '($41) (block '( (let $42 (Member $41 '"Key")) (return $42 (Exists $42)) )))) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $39) $40) (lambda '($43 $44) $44)) (lambda '($45 $46) $45)))) ))) $20)) (let $22 (DqCnBroadcast (TDqOutput $21 '"0"))) (let $23 (StructType '('"l.Value" $11))) (let $24 '('('"_logical_id" '1078) '('"_id" '"87579516-af5de65b-f0f30e7b-104b3bf2") '('"_wide_channels" $23))) (let $25 (DqPhyStage '($22) (lambda '($47) (block '( (let $48 '('One 'Hashed 'Compact)) (let $49 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $47) (lambda '($51) (Exists $51))) (lambda '($52) $52)) (lambda '($53) $53) (lambda '($54) (Void)) $48)) (let $50 (Sort (FlatMap $49 (lambda '($55) (MapJoinCore (ToFlow $13) $55 '"LeftOnly" '('"Fk") $17 '('"Value" '"l.Value") '() '('"l.Fk") '('"r.Key")))) (Bool 'true) (lambda '($56) (Member $56 '"l.Value")))) (return (FromFlow (ExpandMap $50 (lambda '($57) (Member $57 '"l.Value"))))) ))) $24)) (let $26 (DqCnMerge (TDqOutput $25 '"0") '('('"0" '"Asc")))) (let $27 (DqPhyStage '($26) (lambda '($58) (FromFlow (NarrowMap (ToFlow $58) (lambda '($59) (AsStruct '('"l.Value" $59)))))) '('('"_logical_id" '1090) '('"_id" '"3b5944c7-d2bf0e7f-7c86bf92-e6c95775")))) (let $28 '($15 $21 $25 $27)) (let $29 (DqCnResult (TDqOutput $27 '"0") '('"l.Value"))) (let $30 (KqpTxResultBinding $12 '"0" '"0")) (let $31 (KqpPhysicalTx $28 '($29) '('($9 $30)) $7)) (return (KqpPhysicalQuery '($8 $31) '((KqpTxResultBinding (ListType $23) '1 '"0")) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Fk" '"Value") '() (Void) '())) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1157) '('"_id" '"9ff87aa5-58d45830-3facce5d-1c888e62") $3)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($33) $33) $4)) (let $6 (DqCnResult (TDqOutput $5 '0) '())) (let $7 '('('"type" '"data"))) (let $8 (KqpPhysicalTx '($5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (OptionalType (DataType 'Int32))) (let $11 (OptionalType (DataType 'String))) (let $12 (ListType (StructType '('"Fk" $10) '('"Value" $11)))) (let $13 %kqp%tx_result_binding_0_0) (let $14 '('('"_logical_id" '849) '('"_id" '"ce4a4974-dfc4050f-65f2b4af-bb52d4fd") $3)) (let $15 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $13 (lambda '($34) (Member $34 '"Fk")) (Void) (Void) (lambda '($35) (Map (Filter (FlatMap $35 (lambda '($36) (Map (Take (Nth $36 '1) (Uint64 '1)) (lambda '($37) (AsStruct '('"Fk" (Member $37 '"Fk"))))))) (lambda '($38) (Exists (Member $38 '"Fk")))) (lambda '($39) (AsStruct '('"Key" (Member $39 '"Fk"))))))))) $14)) (let $16 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $17 '('"Key")) (let $18 (StructType '('"Key" $10))) (let $19 (KqpCnStreamLookup (TDqOutput $15 '0) $16 $17 (ListType $18) '('('"Strategy" '"LookupRows")))) (let $20 '('('"_logical_id" '933) '('"_id" '"b72787f0-30b74b50-ec8cd2e1-a19f819f") '('"_wide_channels" $18))) (let $21 (DqPhyStage '($19) (lambda '($40) (block '( (let $41 (lambda '($42) (block '( (let $43 (Member $42 '"Key")) (return $43 (Exists $43)) )))) (return (FromFlow (WideMap 
(WideFilter (ExpandMap (ToFlow $40) $41) (lambda '($44 $45) $45)) (lambda '($46 $47) $46)))) ))) $20)) (let $22 (DqCnBroadcast (TDqOutput $21 '0))) (let $23 (Uint64 '"1001")) (let $24 (StructType '('"l.Value" $11))) (let $25 '('('"_logical_id" '1109) '('"_id" '"219a44e7-2d8e04ca-3af6058b-6e9c6aa4") '('"_wide_channels" $24))) (let $26 (DqPhyStage '($22) (lambda '($48) (block '( (let $49 '('One 'Hashed 'Compact)) (let $50 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $48) (lambda '($52) (Exists $52))) (lambda '($53) $53)) (lambda '($54) $54) (lambda '($55) (Void)) $49)) (let $51 (TopSort (FlatMap $50 (lambda '($56) (MapJoinCore (ToFlow $13) $56 '"LeftOnly" '('"Fk") $17 '('"Value" '"l.Value") '() '('"l.Fk") '('"r.Key")))) $23 (Bool 'true) (lambda '($57) (Member $57 '"l.Value")))) (return (FromFlow (ExpandMap $51 (lambda '($58) (Member $58 '"l.Value"))))) ))) $25)) (let $27 (DqCnMerge (TDqOutput $26 '0) '('('0 '"Asc")))) (let $28 (DqPhyStage '($27) (lambda '($59) (FromFlow (NarrowMap (Take (ToFlow $59) $23) (lambda '($60) (AsStruct '('"l.Value" $60)))))) '('('"_logical_id" '1122) '('"_id" '"3bb61dc3-517ecbf8-de66959f-7182b136")))) (let $29 '($15 $21 $26 $28)) (let $30 (DqCnResult (TDqOutput $28 '0) '('"l.Value"))) (let $31 (KqpTxResultBinding $12 '0 '0)) (let $32 (KqpPhysicalTx $29 '($30) '('($9 $31)) $7)) (return (KqpPhysicalQuery '($8 $32) '((KqpTxResultBinding (ListType $24) '1 '0)) '('('"type" '"data_query")))) ) >> test.py::test[window-win_with_cur_row--Results] [GOOD] >> test.py::test[ypath-direct_read_from_dynamic--Results] >> KqpJoin::IdxLookupSelf [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> test.py::test[join-full_trivial--Results] [GOOD] >> KqpIndexLookupJoin::LeftJoinNonPkJoinConditionsWithCast+StreamLookupJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] Test command err: Trying to start YDB, gRPC: 19091, MsgBus: 15102 2025-11-03T14:03:53.478797Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499600752783257:2069];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:53.479073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e31/r3tmp/tmp3JsQel/pdisk_1.dat 2025-11-03T14:03:53.521634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:53.522091Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:53.522247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499600752783228:2081] 1762178633478338 != 1762178633478341 TServer::EnableGrpc on GrpcPort 19091, node 1 2025-11-03T14:03:53.533779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:53.533791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:53.533792Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:53.533818Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15102 2025-11-03T14:03:53.582029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:53.582055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:15102 2025-11-03T14:03:53.583100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:53.593165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:53.605429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.621271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.645434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
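The two S-expression dumps earlier in this output are KQP physical plans for the same logical query, compiled once with '('"type" '"generic") / '"query" and once as a data query ('('"type" '"data") / '"data_query"); the data-query variant additionally wraps the sort in TopSort/Take with (Uint64 '"1001"), which appears to be an engine-imposed result cap rather than part of the query text. As a hedged reconstruction only (the original YQL text is not included in this log), a query of roughly the following shape over the test tables /Root/Left (Fk, Value) and /Root/Right (Key) would yield a LeftOnly MapJoinCore plan of this kind:

    -- Sketch only: assumed query shape, not the literal test query from this log.
    SELECT l.Value
    FROM `/Root/Left` AS l
    LEFT ONLY JOIN `/Root/Right` AS r
        ON l.Fk = r.Key      -- join keys taken from the plan: '('"Fk") vs '('"Key")
    ORDER BY l.Value;        -- matches the Sort / TopSort on '"l.Value"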
2025-11-03T14:03:53.657478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.746563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:53.942361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600752784873:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.942389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.942456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600752784883:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.942463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.011100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.018526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.028396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.042574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.048744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.056096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.070401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.084970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.101028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499605047753042:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.101050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605047753047:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.101055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.101091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605047753050:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.101104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.101683Z node 1 :FLAT_TX_SCHEMESHARD ... 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:54.849903Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:54.869065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:54.879116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:54.905714Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:54.917758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:55.037827Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:55.252775Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499609536237638:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.252900Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.256776Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499609536237721:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.256813Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.257327Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.267067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.274667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.288463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.302274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.317484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.330677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.344873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.362432Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499609536238510:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.362459Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.362497Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499609536238515:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.362525Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499609536238516:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.362556Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.363223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:55.371316Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499609536238519:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:03:55.435704Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499609536238571:3554] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:55.623312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.630290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.637690Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.794689Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |56.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] |56.0%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |56.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupSelf [GOOD] Test command err: Trying to start YDB, gRPC: 17965, MsgBus: 64987 2025-11-03T14:03:53.771869Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499601340760914:2066];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:53.771885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e2e/r3tmp/tmp3iqapH/pdisk_1.dat 2025-11-03T14:03:53.816517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:53.827145Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:53.827357Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription 
[1:7568499601340760888:2081] 1762178633771626 != 1762178633771629 TServer::EnableGrpc on GrpcPort 17965, node 1 2025-11-03T14:03:53.837279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:53.837291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:53.837293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:53.837326Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64987 2025-11-03T14:03:53.879177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:53.879201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:53.880256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:53.895570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:53.969328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.985471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:54.002833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:54.015220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:54.065184Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:54.159643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605635729832:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605635729842:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.205366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.212756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.224807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.238501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.252004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.266432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.280721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.295013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.311566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499605635730705:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.311589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605635730710:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.311595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.311638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605635730713:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.311644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.312318Z node 1 :FLAT_TX_SCHEMESHARD ... 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:55.148644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:55.158189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:55.169150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:55.189138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:55.202269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:55.325757Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:55.539865Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499607422451658:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.539883Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.539936Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499607422451667:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.539947Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.553297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.560732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.567933Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.575050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.582173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.596473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.610474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.624598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.640337Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499607422452530:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.640357Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499607422452535:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.640364Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.640443Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499607422452538:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.640458Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:55.640893Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:55.644032Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499607422452537:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:03:55.703496Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499607422452591:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:55.931594Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.940233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.953732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:56.075470Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15809, MsgBus: 62341 2025-11-03T14:03:53.004082Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499600695904530:2068];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:53.004499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e32/r3tmp/tmpNFP2tK/pdisk_1.dat 2025-11-03T14:03:53.040748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:53.054615Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:53.054848Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499600695904502:2081] 1762178633003636 != 1762178633003639 TServer::EnableGrpc on GrpcPort 15809, node 1 2025-11-03T14:03:53.066037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:53.066054Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:53.066055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:53.066091Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62341 TClient is connected to server localhost:62341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:53.107895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:53.107938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:53.109107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:53.118899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:53.128456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.144812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.167724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:53.181090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:53.207639Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:53.461426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600695906152:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.461450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.461534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600695906162:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.461544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.503232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.511090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.524943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.538479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.545632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.559861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.573816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.588886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.605179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499600695907026:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.605213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600695907031:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.605220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.605260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600695907034:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.605275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.605878Z node 1 :FLAT_TX_SCHEMESHARD ... eate_table.cpp:690) 2025-11-03T14:03:55.631219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.638162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.644950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.652329Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.659895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.676204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.688563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.718988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.725683Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.785562Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1108) '('"_id" '"393e2158-f041be11-366ef6b0-a23f4e4b") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($36) $36) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 '('"Value" $14)) (let $16 (ListType (StructType '('"Fk" $12) $13 $15))) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '852) '('"_id" '"dad10fd6-d92cface-4dd779a7-91d31afa") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($37) (Member $37 '"Fk")) (Void) (Void) (lambda '($38) (Map (Filter (FlatMap $38 (lambda '($39) (Map (Take (Nth $39 '1) (Uint64 '1)) (lambda '($40) (AsStruct '('"Fk" (Member $40 '"Fk"))))))) (lambda '($41) (Exists (Member $41 '"Fk")))) (lambda '($42) (AsStruct '('"Key" (Member $42 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType $13 $15)) (let $23 '('('"_logical_id" '934) '('"_id" '"988ec11-225e49cb-5486079a-8cbdc10d") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($43) (block '( (let $44 (lambda '($46) (block '( (let $47 (Member $46 '"Key")) (return $47 (Member $46 '"Value") (Exists $47)) )))) (let $45 (lambda '($51 $52 $53) $51 $52)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $43) $44) (lambda '($48 $49 $50) $50)) $45))) ))) $23)) (let $25 (DqCnBroadcast (TDqOutput $24 '0))) (let $26 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $27 '('('"_logical_id" '1063) '('"_id" '"b5022835-b42aa32-3fc2e36d-22c0fa9d") '('"_wide_channels" $26))) (let $28 (DqPhyStage '($25) (lambda '($54) (block '( (let $55 '('Many 'Hashed 'Compact)) (let $56 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $54) (lambda '($59 $60) (Exists $59))) (lambda '($61 $62) (IfPresent $61 (lambda '($63) (Just '($63 (AsStruct '('"Key" $61) '('"Value" $62))))) (Nothing (OptionalType (TupleType $11 $22)))))) (lambda '($64) (Nth $64 '0)) (lambda '($65) (Nth $65 '1)) $55)) (let $57 (Sort (FlatMap $56 (lambda '($66) (block '( (let $67 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (let $68 '('"Key" '"r.Key" '"Value" '"r.Value")) (return (MapJoinCore (ToFlow $17) $66 '"Left" '('"Fk") '('"Key") $67 $68 '('"l.Fk") 
'('"r.Key"))) )))) (Bool 'true) (lambda '($69) (Member $69 '"l.Key")))) (let $58 (lambda '($70) (Member $70 '"l.Fk") (Member $70 '"l.Key") (Member $70 '"l.Value") (Member $70 '"r.Key") (Member $70 '"r.Value"))) (return (FromFlow (ExpandMap $57 $58))) ))) $27)) (let $29 (DqCnMerge (TDqOutput $28 '0) '('('1 '"Asc")))) (let $30 (DqPhyStage '($29) (lambda '($71) (FromFlow (NarrowMap (ToFlow $71) (lambda '($72 $73 $74 $75 $76) (AsStruct '('"l.Fk" $72) '('"l.Key" $73) '('"l.Value" $74) '('"r.Key" $75) '('"r.Value" $76)))))) '('('"_logical_id" '1075) '('"_id" '"4cff916e-a48b11bc-ebf610e7-77817452")))) (let $31 '($19 $24 $28 $30)) (let $32 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $33 (DqCnResult (TDqOutput $30 '0) $32)) (let $34 (KqpTxResultBinding $16 '0 '0)) (let $35 (KqpPhysicalTx $31 '($33) '('($10 $34)) $8)) (return (KqpPhysicalQuery '($9 $35) '((KqpTxResultBinding (ListType $26) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1141) '('"_id" '"f6e403ab-e9afae10-8f7bcb07-e9002fc9") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($37) $37) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 '('"Value" $14)) (let $16 (ListType (StructType '('"Fk" $12) $13 $15))) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '883) '('"_id" '"f3b864cb-eaef409d-b60b6a27-2df10560") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($38) (Member $38 '"Fk")) (Void) (Void) (lambda '($39) (Map (Filter (FlatMap $39 (lambda '($40) (Map (Take (Nth $40 '1) (Uint64 '1)) (lambda '($41) (AsStruct '('"Fk" (Member $41 '"Fk"))))))) (lambda '($42) (Exists (Member $42 '"Fk")))) (lambda '($43) (AsStruct '('"Key" (Member $43 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType $13 $15)) (let $23 '('('"_logical_id" '965) '('"_id" '"91568fe0-325c1d41-12d25b70-89512dbf") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($44) (block '( (let $45 (lambda '($47) (block '( (let $48 (Member $47 '"Key")) (return $48 (Member $47 '"Value") (Exists $48)) )))) (let $46 (lambda '($52 $53 $54) $52 $53)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $44) $45) (lambda '($49 $50 $51) $51)) $46))) ))) $23)) (let $25 (DqCnBroadcast (TDqOutput $24 '0))) (let $26 (Uint64 '"1001")) (let $27 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $28 '('('"_logical_id" '1094) '('"_id" '"116120b0-520f89c7-ebc2096-af823145") '('"_wide_channels" $27))) (let $29 (DqPhyStage '($25) (lambda '($55) (block '( (let $56 '('Many 'Hashed 'Compact)) (let $57 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $55) (lambda '($60 $61) (Exists $60))) (lambda '($62 $63) (IfPresent 
$62 (lambda '($64) (Just '($64 (AsStruct '('"Key" $62) '('"Value" $63))))) (Nothing (OptionalType (TupleType $11 $22)))))) (lambda '($65) (Nth $65 '0)) (lambda '($66) (Nth $66 '1)) $56)) (let $58 (TopSort (FlatMap $57 (lambda '($67) (block '( (let $68 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (let $69 '('"Key" '"r.Key" '"Value" '"r.Value")) (return (MapJoinCore (ToFlow $17) $67 '"Left" '('"Fk") '('"Key") $68 $69 '('"l.Fk") '('"r.Key"))) )))) $26 (Bool 'true) (lambda '($70) (Member $70 '"l.Key")))) (let $59 (lambda '($71) (Member $71 '"l.Fk") (Member $71 '"l.Key") (Member $71 '"l.Value") (Member $71 '"r.Key") (Member $71 '"r.Value"))) (return (FromFlow (ExpandMap $58 $59))) ))) $28)) (let $30 (DqCnMerge (TDqOutput $29 '0) '('('1 '"Asc")))) (let $31 (DqPhyStage '($30) (lambda '($72) (FromFlow (NarrowMap (Take (ToFlow $72) $26) (lambda '($73 $74 $75 $76 $77) (AsStruct '('"l.Fk" $73) '('"l.Key" $74) '('"l.Value" $75) '('"r.Key" $76) '('"r.Value" $77)))))) '('('"_logical_id" '1107) '('"_id" '"f443f003-5c47554f-11b911f7-b49fd74a")))) (let $32 '($19 $24 $29 $31)) (let $33 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $34 (DqCnResult (TDqOutput $31 '0) $33)) (let $35 (KqpTxResultBinding $16 '0 '0)) (let $36 (KqpPhysicalTx $32 '($34) '('($10 $35)) $8)) (return (KqpPhysicalQuery '($9 $36) '((KqpTxResultBinding (ListType $27) '1 '0)) '('('"type" '"data_query")))) ) >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-11-03T14:03:49.637371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:49.661531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:49.663457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:49.663525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:49.663549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0020fa/r3tmp/tmpADm6ne/pdisk_1.dat 2025-11-03T14:03:49.720536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:49.720568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:49.728588Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:49.729591Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178629302712 != 1762178629302716 2025-11-03T14:03:49.760718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:49.807907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:49.841569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:49.925749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:49.940455Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:49.940644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:49.940694Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:681:2569] 2025-11-03T14:03:49.940735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:49.941503Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:667:2561], Recipient [1:685:2572]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:49.949301Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 
2025-11-03T14:03:49.949410Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:667:2561], Recipient [1:685:2572]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:49.949529Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:685:2572] 2025-11-03T14:03:49.949569Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:49.950575Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:667:2561], Recipient [1:685:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:49.950980Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:49.951019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:49.951128Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:49.951135Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:49.951140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:49.951177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:49.951198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:49.951206Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:712:2569] in generation 1 2025-11-03T14:03:49.951257Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:49.951267Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:49.951334Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:03:49.951338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:03:49.951342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:03:49.951358Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:49.951365Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:49.951370Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:713:2572] in generation 1 2025-11-03T14:03:49.961693Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:49.965817Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:49.965893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:49.965929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:716:2590] 2025-11-03T14:03:49.965936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:49.965941Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 
2025-11-03T14:03:49.965947Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:49.965981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:49.965988Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:03:49.965999Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:49.966008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:717:2591] 2025-11-03T14:03:49.966012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:03:49.966016Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:03:49.966021Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:49.966089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:681:2569], Recipient [1:681:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:49.966097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:49.966126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:685:2572], Recipient [1:685:2572]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:49.966130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:49.966238Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:49.966261Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:49.966266Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2025-11-03T14:03:49.966275Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-11-03T14:03:49.966290Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:49.966296Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:49.966303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:49.966308Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:49.966313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:49.966318Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:49.966324Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-11-03T14:03:49.966330Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2025-11-03T14:03:49.966334Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:49.966338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037889 2025-11-03T14:03:49.966342Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-11-03T14:03:49.966346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037889 2025-11-03T14:03:49.966349Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: ... eat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status GENERIC_ERROR 2025-11-03T14:03:56.248463Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:67:2114] Handle TEvExecuteKqpTransaction 2025-11-03T14:03:56.248506Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:67:2114] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-11-03T14:03:56.249013Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715662. Ctx: { TraceId: 01k950g6e17nb7tw39t64ad5gd, Database: , SessionId: ydb://session/3?node_id=3&id=ZWVlYzBlZTUtNzQ5NDU0ODAtOWI0YmNhZDAtZWU5ZTZjOTk=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:56.249655Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [3:1199:2981], Recipient [3:676:2567]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-11-03T14:03:56.249759Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2561: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-11-03T14:03:56.249769Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v21000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v21000/18446744073709551615 ImmediateWriteEdgeReplied# v21000/18446744073709551615 2025-11-03T14:03:56.249775Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2664: 72075186224037888 changed HEAD read to non-repeatable v21000/18446744073709551615 2025-11-03T14:03:56.249783Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2025-11-03T14:03:56.249797Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:11] at 72075186224037888 is Executed 2025-11-03T14:03:56.249800Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2025-11-03T14:03:56.249804Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:03:56.249818Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:03:56.249829Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 72075186224037888 2025-11-03T14:03:56.249846Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:11] at 
72075186224037888 is Executed 2025-11-03T14:03:56.249849Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:03:56.249853Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2025-11-03T14:03:56.249863Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2025-11-03T14:03:56.249876Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1657: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-11-03T14:03:56.249954Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2272: 72075186224037888 Complete read# {[3:1199:2981], 0} after executionsCount# 1 2025-11-03T14:03:56.249963Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2246: 72075186224037888 read iterator# {[3:1199:2981], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-11-03T14:03:56.249994Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2297: 72075186224037888 read iterator# {[3:1199:2981], 0} finished in read 2025-11-03T14:03:56.250001Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:11] at 72075186224037888 is Executed 2025-11-03T14:03:56.250007Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2025-11-03T14:03:56.250010Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:03:56.250012Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:03:56.250020Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:11] at 72075186224037888 is Executed 2025-11-03T14:03:56.250022Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:03:56.250025Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:11] at 72075186224037888 has finished 2025-11-03T14:03:56.250029Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2796: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-11-03T14:03:56.250051Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2845: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-11-03T14:03:56.250331Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [3:1199:2981], Recipient [3:676:2567]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-11-03T14:03:56.250339Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3519: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-11-03T14:03:56.269325Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:67:2114] Handle TEvExecuteKqpTransaction 2025-11-03T14:03:56.269344Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:67:2114] TxId# 
281474976715663 ProcessProposeKqpTransaction 2025-11-03T14:03:56.269507Z node 3 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715663. Ctx: { TraceId: 01k950g6ewekatepr2r6xrx1gm, Database: , SessionId: ydb://session/3?node_id=3&id=NDE0Zjk0NTgtZDY0MjdiMmMtNzNkODY0ZjktNDI5MDliOTU=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:03:56.270103Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [3:1229:3005], Recipient [3:913:2728]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-11-03T14:03:56.270173Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2561: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-11-03T14:03:56.270187Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-11-03T14:03:56.270193Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2664: 72075186224037889 changed HEAD read to non-repeatable v21000/18446744073709551615 2025-11-03T14:03:56.270200Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-11-03T14:03:56.270218Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:03:56.270222Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-11-03T14:03:56.270225Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-11-03T14:03:56.270234Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-11-03T14:03:56.270246Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-11-03T14:03:56.270257Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:03:56.270260Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-11-03T14:03:56.270262Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-11-03T14:03:56.270265Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-11-03T14:03:56.270276Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1657: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-11-03T14:03:56.270338Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2272: 72075186224037889 Complete read# {[3:1229:3005], 0} after executionsCount# 1 2025-11-03T14:03:56.270344Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2246: 72075186224037889 read iterator# {[3:1229:3005], 0} sends rowCount# 2, 
bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-11-03T14:03:56.270356Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2297: 72075186224037889 read iterator# {[3:1229:3005], 0} finished in read 2025-11-03T14:03:56.270362Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:03:56.270365Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-11-03T14:03:56.270367Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-11-03T14:03:56.270370Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-11-03T14:03:56.270377Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:03:56.270380Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-11-03T14:03:56.270383Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037889 has finished 2025-11-03T14:03:56.270386Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2796: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-11-03T14:03:56.270410Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2845: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-11-03T14:03:56.270701Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [3:1229:3005], Recipient [3:913:2728]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-11-03T14:03:56.270711Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3519: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore Test command err: Trying to start YDB, gRPC: 14866, MsgBus: 29649 2025-11-03T14:03:51.090063Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499590425134824:2139];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:51.090180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e41/r3tmp/tmphtcpSm/pdisk_1.dat 2025-11-03T14:03:51.145547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:51.150738Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:51.150892Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499590425134723:2081] 1762178631088937 != 1762178631088940 TServer::EnableGrpc on GrpcPort 14866, node 1 2025-11-03T14:03:51.157456Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:51.157471Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:51.157473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:51.157528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29649 TClient is connected to server localhost:29649 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:51.199211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:51.199246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:51.199838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:51.207695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:51.333117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:51.556840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499590425135412:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.556844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499590425135401:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.556863Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.556933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499590425135416:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.556949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:51.557670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:51.559736Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499590425135415:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:51.626208Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499590425135468:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:51.668494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.681422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.690291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.704273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.718408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.749798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.757410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.767273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.781045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.787828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.795039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.809324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:51.823039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
-11-03T14:03:53.096748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:53.096771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:53.096779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:53.096804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:53.096814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:53.096828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:53.096836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:53.096846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:53.096855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:53.096865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:53.096874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:53.096908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:53.096918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:53.096934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:53.096943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:53.096953Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:53.096962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:53.096972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:53.096982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:53.096994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:53.097003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:53.097017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:53.097025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:53.097176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:53.097187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:53.097202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:53.097212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:53.097241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:53.097250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:53.097266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:53.097275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:53.097288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:53.097296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:53.097302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:53.097312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:53.097345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:53.097354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:53.097371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:53.097379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:53.097390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:53.097399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:53.097409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:53.097418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:53.097434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:53.097443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:53.097457Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20;
2025-11-03T14:03:53.097468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
VERIFY failed (2025-11-03T14:03:52.534376Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed
0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665
1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06
2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2
3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01
4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB
5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789
6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67
7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664
8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794
9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0
10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E
11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC
12. ??:0: ?? @ 0x7F6308771AC2
13. ??:0: ?? @ 0x7F63088038BF
>> test.py::test[aggregate-compare_by_nulls-default.txt-Results] [GOOD]
>> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results]
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD]
Test command err:
2025-11-03T14:03:35.153314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown
2025-11-03T14:03:35.179604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-11-03T14:03:35.181195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.181255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.181279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001d9c/r3tmp/tmpiDpOaH/pdisk_1.dat 2025-11-03T14:03:35.236789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:35.236820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:35.248586Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:35.249578Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178614728078 != 1762178614728082 2025-11-03T14:03:35.280689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:35.327186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:35.360859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:35.443967Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:35.443997Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:35.444028Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:35.458053Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:35.458086Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:35.458247Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:35.458258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:35.458305Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:35.458329Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:35.458340Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:35.458393Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:35.458736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:35.458946Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:35.458955Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:35.472653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:35.472842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:35.472888Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:35.472940Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:35.480433Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:35.480606Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:35.480634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:35.480804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:35.480814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:35.480822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:35.480883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:35.480907Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:35.480919Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:35.480995Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:35.487876Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:35.487942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:35.487959Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:35.487963Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:35.487967Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:35.487970Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:35.488022Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:35.488028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:35.488103Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:35.488118Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:35.488186Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:35.488192Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:35.488197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:35.488200Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:35.488205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:35.488209Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:35.488213Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:35.488222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:35.488225Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:35.488230Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:35.488238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:35.488241Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:35.488256Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:35.488295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:35.488304Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:35.488314Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:35.488327Z node ... 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-11-03T14:03:56.469395Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-11-03T14:03:56.469408Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [13:761:2627], Recipient [13:675:2566]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-11-03T14:03:56.469413Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-11-03T14:03:56.469417Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-11-03T14:03:56.469424Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-11-03T14:03:56.469460Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [3001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:979:2766], exec latency: 0 ms, propose latency: 0 ms 2025-11-03T14:03:56.469493Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [13:675:2566], Recipient [13:761:2627]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-11-03T14:03:56.469498Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-11-03T14:03:56.469503Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-11-03T14:03:56.469510Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-11-03T14:03:56.469529Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [3001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:979:2766], exec latency: 0 ms, propose 
latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 294 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 3001 TxId: 281474976715663 } TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 162 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 3001 TxId: 281474976715663 } 2025-11-03T14:03:56.469739Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:56.469782Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:03:56.469822Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:56.471085Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2025-11-03T14:03:56.471122Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [13:675:2566], Recipient [13:761:2627]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-11-03T14:03:56.471131Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-11-03T14:03:56.471139Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-11-03T14:03:56.473945Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-11-03T14:03:56.474014Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [13:761:2627], Recipient [13:675:2566]: {TEvReadSet step# 3001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-11-03T14:03:56.474024Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-11-03T14:03:56.474033Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-11-03T14:03:56.495890Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:67:2114] Handle TEvExecuteKqpTransaction 2025-11-03T14:03:56.495923Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:67:2114] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-11-03T14:03:56.496152Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715667. Ctx: { TraceId: 01k950g6nq348j6gh47atdf74v, Database: , SessionId: ydb://session/3?node_id=13&id=NjZhNTY2YzctNGNjNTlhM2EtMjYwMGVjOC1jN2Y1OTQ5Mg==, PoolId: default, DatabaseId: /Root}. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-11-03T14:03:56.496848Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [13:1089:2872], Recipient [13:675:2566]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-11-03T14:03:56.496905Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2561: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-11-03T14:03:56.496918Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-11-03T14:03:56.496928Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2664: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-11-03T14:03:56.496942Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-11-03T14:03:56.496966Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-11-03T14:03:56.496972Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-11-03T14:03:56.496984Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:03:56.496989Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:03:56.497007Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2025-11-03T14:03:56.497013Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-11-03T14:03:56.497018Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:03:56.497023Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-11-03T14:03:56.497028Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-11-03T14:03:56.497046Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1657: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-11-03T14:03:56.497134Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2272: 72075186224037888 Complete read# {[13:1089:2872], 0} after executionsCount# 1 2025-11-03T14:03:56.497144Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2246: 72075186224037888 read iterator# {[13:1089:2872], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes 
left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-11-03T14:03:56.497164Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2297: 72075186224037888 read iterator# {[13:1089:2872], 0} finished in read 2025-11-03T14:03:56.497176Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-11-03T14:03:56.497180Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-11-03T14:03:56.497185Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:03:56.497190Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:03:56.497206Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:7] at 72075186224037888 is Executed 2025-11-03T14:03:56.497210Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:03:56.497215Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:7] at 72075186224037888 has finished 2025-11-03T14:03:56.497221Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2796: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-11-03T14:03:56.497245Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2845: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-11-03T14:03:56.497460Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [13:1089:2872], Recipient [13:675:2566]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-11-03T14:03:56.497471Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3519: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } |56.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[join-full_trivial--Results] [GOOD] >> KqpIndexLookupJoin::JoinInclusionTestSemiJoin+StreamLookupJoin >> test.py::test[in-in_ansi_join--Results] [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> test.py::test[pg-tpcds-q90-default.txt-ForceBlocks] [GOOD] >> test.py::test[pg-tpcds-q90-default.txt-Results] >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore Test command err: Trying to start YDB, gRPC: 1473, MsgBus: 62161 2025-11-03T14:03:52.679841Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499596416894155:2068];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:52.680155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e34/r3tmp/tmpcRa07N/pdisk_1.dat 
2025-11-03T14:03:52.743322Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:52.743545Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:52.743715Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499596416894127:2081] 1762178632679307 != 1762178632679310 TServer::EnableGrpc on GrpcPort 1473, node 1 2025-11-03T14:03:52.749549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:52.749563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:52.749565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:52.749604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62161 TClient is connected to server localhost:62161 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:52.788164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:52.788196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:52.789263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:52.792366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-11-03T14:03:52.963384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:53.031177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600711862102:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.031188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600711862114:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.031203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.031288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499600711862117:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.031306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.031974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:53.033722Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499600711862116:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:53.124307Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499600711862169:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:53.174801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:03:53.197792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:53.197839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:53.197869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:53.197894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:53.197921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:53.197946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:53.197966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:53.197987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:53.198001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:53.198018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:53.198039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:53.198060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:53.198079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:53.198101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499600711862388:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:53.198126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499600711862388:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:53.198168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499600711862388:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:53.198194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499600711862388:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... 
tion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:53.229531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:53.229539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:53.229542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:53.229548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:53.229555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:53.229650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:53.229657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:53.229664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:53.229666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:53.229678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:53.229684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:53.229692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:53.229696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:53.229702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:53.229706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:53.229717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:53.229721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:53.229743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:53.229750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:53.229787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:53.229796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:53.229801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:53.229806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:53.229811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:53.229821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:53.229834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:53.229844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:53.229852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:53.229856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:53.231757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7568499600711862389:2337];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715660;this=54798281065264;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178633230;max=18446744073709551615;plan=0;src=[1:7568499596416894474:2147];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:03:53.231939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:53.231958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:53.231992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:53.232017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:53.232042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:53.232064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:53.232089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:53.232113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:53.232136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:53.232160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:53.232183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:53.232210Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2;
2025-11-03T14:03:53.232233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499600711862440:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot;
VERIFY failed (2025-11-03T14:03:53.232073Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed
0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665
1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06
2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2
3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01
4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB
5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789
6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67
7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664
8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794
9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0
10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E
11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC
12. ??:0: ?? @ 0x7FEDAD273AC2
13. ??:0: ?? @ 0x7FEDAD3058BF
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore
>> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup
>> KqpJoinOrder::TPCH9_100
>> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD]
>> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin
>> test.py::test[join-nopushdown_filter_over_inner--Results] [GOOD]
>> test.py::test[join-nopushdown_filter_over_inner-off-ForceBlocks]
>> test.py::test[aggregate-group_by_gs_grouping--Results] [GOOD]
>> test.py::test[aggregate-group_by_rollup_grouping_hum--Results]
>> KqpIndexLookupJoin::LeftJoinNonPkJoinConditionsWithCast+StreamLookupJoin [GOOD]
>> KqpIndexLookupJoin::LeftJoinNonPkJoinConditionsWithCast-StreamLookupJoin
>> test.py::test[select-column_labels-default.txt-Results] [GOOD]
>> OlapEstimationRowsCorrectness::TPCH10
>> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup
>> KqpJoin::RightSemiJoin_FullScan
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16-ColumnStore
Test command err:
Trying to start YDB, gRPC: 63129, MsgBus: 10013
2025-11-03T14:03:52.999285Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499597581294313:2066];send_to=[0:7307199536658146131:7762515];
2025-11-03T14:03:52.999440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e33/r3tmp/tmputoMoP/pdisk_1.dat
2025-11-03T14:03:53.048936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions
2025-11-03T14:03:53.059718Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:53.059916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499597581294287:2081] 1762178632999025 != 1762178632999028 TServer::EnableGrpc on GrpcPort 63129, node 1 2025-11-03T14:03:53.064838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:53.064849Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:53.064851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:53.064884Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10013 2025-11-03T14:03:53.108307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:53.108340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:53.109365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10013 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:53.120231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:53.325492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:53.410425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499601876262266:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.410430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499601876262261:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.410444Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.410505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499601876262276:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.410518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:53.411311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:53.413042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499601876262275:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:53.472605Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499601876262328:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:53.514340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.529097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.535953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.545050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.551918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.579884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.586938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.593838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.607901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.622219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.636183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.642900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:53.649680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
-11-03T14:03:54.768566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:54.768578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:54.768586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:54.768611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:54.768623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:54.768635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:54.768640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:54.768648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:54.768654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:54.768661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:54.768665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:54.768700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:54.768710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:54.768723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:54.768732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:54.768739Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:54.768748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:54.768755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:54.768764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:54.768778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:54.768790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:54.768805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:54.768814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:54.768943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:54.768956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:54.768968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:54.768978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:54.769011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:54.769022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:54.769034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:54.769045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:54.769058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:54.769063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:54.769069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:54.769076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:54.769115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:54.769128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:54.769140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:54.769151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:54.769173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:54.769182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:54.769193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:54.769201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:54.769213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:54.769222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:54.769232Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:54.769240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:54.286748Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7F60FD179AC2 13. ??:0: ?? @ 0x7F60FD20B8BF >> test.py::test[blocks-date_greater_scalar--Results] [GOOD] >> test.py::test[blocks-pg_sort--ForceBlocks] >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup >> KqpIndexLookupJoin::JoinInclusionTestSemiJoin+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinInclusionTestSemiJoin-StreamLookupJoin >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-ForceBlocks] [GOOD] >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] >> test.py::test[ypath-multi_key-default.txt-ForceBlocks] [GOOD] >> test.py::test[ypath-multi_key-default.txt-Results] >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::SortingsWithLookupJoin3+RemoveLimitOperator Test command err: Trying to start YDB, gRPC: 1724, MsgBus: 4994 2025-11-03T14:03:53.762315Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499601397193411:2068];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:53.762573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e2a/r3tmp/tmprwX8Of/pdisk_1.dat 2025-11-03T14:03:53.819965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:53.821109Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:53.821336Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification 
cookie mismatch for subscription [1:7568499601397193383:2081] 1762178633761864 != 1762178633761867 TServer::EnableGrpc on GrpcPort 1724, node 1 2025-11-03T14:03:53.832634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:53.832650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:53.832652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:53.832698Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4994 2025-11-03T14:03:53.865882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:53.865914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:53.866954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:53.888569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:54.096515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:54.180582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605692161366:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.180587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605692161355:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.180603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.180674Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499605692161370:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.180690Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.181455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:54.183140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499605692161369:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:54.281289Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499605692161422:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:54.316322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.332068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.339253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.349834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.356718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.377888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.384691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.391694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.398809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.405443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.412807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.419958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.426691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself i ... 
-11-03T14:03:55.241896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:55.241909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:55.241919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:55.241945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:55.241954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:55.241967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:55.241977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:55.241990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:55.242000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:55.242010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:55.242019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:55.242047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:55.242057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:55.242076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:55.242085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:55.242092Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:55.242101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:55.242107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:55.242117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:55.242131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:55.242140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:55.242156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:55.242164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:55.242232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:55.242242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:55.242254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:55.242264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:55.242289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:55.242299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:55.242316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:55.242325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:55.242337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:55.242346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:55.242357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:55.242367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:55.242390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:55.242400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:55.242419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:55.242429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:55.242440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:55.242449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:55.242460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:55.242469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:55.242487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:55.242496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:55.242509Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:55.242519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:55.074319Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7F12F796AAC2 13. ??:0: ?? @ 0x7F12F79FC8BF |56.0%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH15 >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1+EnableSeparationComputeActorsFromRead Test command err: Trying to start YDB, gRPC: 14186, MsgBus: 10774 2025-11-03T14:03:53.772419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499602685056333:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:53.772514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e29/r3tmp/tmpxjn8aW/pdisk_1.dat 2025-11-03T14:03:53.816431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:53.816931Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:53.817121Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499602685056306:2081] 1762178633772145 != 1762178633772148 TServer::EnableGrpc on GrpcPort 14186, node 1 2025-11-03T14:03:53.825005Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:53.825019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: 
will try to initialize from file: (empty maybe) 2025-11-03T14:03:53.825021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:53.825058Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:10774 TClient is connected to server localhost:10774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:53.879488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:53.879515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:53.880595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:53.881432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:53.996869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:54.159774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499606980024279:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499606980024290:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499606980024294:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.159859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.160521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:54.162288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499606980024293:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:54.216160Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499606980024346:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:54.252031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.264616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.273076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.287325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.301195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.331820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.339290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.350154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.364102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.378023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.392382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.406466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:54.419816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
-11-03T14:03:55.165759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:55.165771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:55.165780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:55.165801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:55.165811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:55.165822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:55.165831Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:55.165839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:55.165848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:55.165854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:55.165863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:55.165885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:55.165894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:55.165906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:55.165916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:55.165922Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:55.165932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:55.165938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:55.165948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:55.165961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:55.165971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:55.165982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:55.165991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037958;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:55.166065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:55.166077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:55.166088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:55.166097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:55.166116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:55.166125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:55.166137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:55.166145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:55.166152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:55.166162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:55.166168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:55.166178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:55.166200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:55.166210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:55.166223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:55.166241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:55.166252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:55.166258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:55.166268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:55.166272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:55.166290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:55.166299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:55.166309Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:55.166317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037970;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:55.160792Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FD55F36EAC2 13. ??:0: ?? @ 0x7FD55F4008BF |56.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[select-column_labels-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2-ColumnStore Test command err: Trying to start YDB, gRPC: 21056, MsgBus: 29974 2025-11-03T14:03:54.592842Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499606741744802:2074];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:54.593096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e28/r3tmp/tmpYG8zzR/pdisk_1.dat 2025-11-03T14:03:54.633604Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:54.633877Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:54.633983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499606741744762:2081] 1762178634591849 != 1762178634591852 TServer::EnableGrpc on GrpcPort 21056, node 1 2025-11-03T14:03:54.638871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:54.638881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:54.638882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:54.638914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is 
connected to server localhost:29974 TClient is connected to server localhost:29974 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:54.679516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:54.695506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:54.695531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:54.696613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:54.793613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:54.980276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499606741745451:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.980278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499606741745440:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.980305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.980392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499606741745455:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.980413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:54.980928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:54.982306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499606741745454:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:55.051665Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499611036712803:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:55.102526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.117983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.124899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.134611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.148413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.179959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.187756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.197971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.213838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.226764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.240521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.253164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:55.260074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose its ... 
-11-03T14:03:55.891517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:55.891533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:55.891539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:55.891585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:55.891599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:55.891610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:55.891621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:55.891626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:55.891633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:55.891639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:55.891643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:55.891644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:55.891649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:55.891657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:55.891663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:55.891663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:55.891669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:55.891679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:55.891684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:55.891695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:55.891696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:55.891699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:55.891714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:55.891725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:55.891730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:55.891737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:55.891744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:55.891749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:55.891755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:55.891755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:55.891760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:55.891765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:55.891767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:55.891770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:55.891773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:55.891775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:55.891780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:55.891785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:55.891789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:55.891790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:55.891794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:55.891800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:55.891804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:55.891805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:55.891810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:55.891815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:55.891818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:55.894208Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FAE96943AC2 13. ??:0: ?? @ 0x7FAE969D58BF >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup >> KqpJoin::RightSemiJoin_FullScan [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28966, MsgBus: 31486 2025-11-03T14:03:56.896020Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499612951240944:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:56.896238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e21/r3tmp/tmpotaNGI/pdisk_1.dat 2025-11-03T14:03:56.939279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:56.939651Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:56.939838Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499612951240917:2081] 1762178636895753 != 1762178636895756 TServer::EnableGrpc on GrpcPort 28966, node 1 2025-11-03T14:03:56.948615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:56.948632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:56.948634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2025-11-03T14:03:56.948683Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31486 TClient is connected to server localhost:31486 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:56.999831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:56.999861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:57.001047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:57.006790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:57.015603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.033205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.055093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
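The two "VERIFY failed ... ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed" traces above abort through the panic machinery in util/system/yassert.cpp after a columnshard event handler (columnshard.cpp:510 Handle) constructs a tablet pipe client with tablet id 0. Below is a minimal standalone sketch of that failure mode for reference only; the macro, class, and message text are illustrative stand-ins and are not the actual YDB source.

// Minimal sketch (assumption): a constructor guarded by a hard assert in the
// style of util/system/yassert.h. On failure it prints a line of the form
// "VERIFY failed: <file>:<line> <func>(): requirement <expr> failed" and
// aborts the process, matching the traces above.
#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Hypothetical stand-in for the Arcadia-style VERIFY/ABORT_UNLESS macros.
#define VERIFY_REQUIREMENT(expr)                                                        \
    do {                                                                                \
        if (!(expr)) {                                                                  \
            std::fprintf(stderr, "VERIFY failed: %s:%d %s(): requirement %s failed\n",  \
                         __FILE__, __LINE__, __func__, #expr);                          \
            std::abort();                                                               \
        }                                                                               \
    } while (false)

struct TClientSketch {
    explicit TClientSketch(std::uint64_t tabletId) {
        // The failing requirement from the log: a pipe client must not be
        // created for tablet id 0.
        VERIFY_REQUIREMENT(tabletId != 0);
    }
};

int main() {
    TClientSketch ok{72075186224037958ULL}; // fine: non-zero tablet id
    TClientSketch bad{0};                   // aborts, reproducing the failure mode in the traces
}

In the traces the zero tablet id reaches the pipe client from the columnshard handler, so the abort is the assertion firing on bad input rather than the pipe client itself misbehaving.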
2025-11-03T14:03:57.067065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.171018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:57.238445Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617246209870:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.238481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.238563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617246209880:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.238576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.291202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.298792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.311750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.325849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.340164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.354066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.367610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.382591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.402273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499617246210743:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.402293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617246210748:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.402305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.402372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617246210751:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.402387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.403166Z node 1 :FLAT_TX_SCHEMESHARD ... pp:228: got bad distributable configuration TClient is connected to server localhost:27441 TClient is connected to server localhost:27441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.130445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.150461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.160979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.179401Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.191333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:58.269771Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.466088Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499622754084263:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.466144Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.469593Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499622754084346:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.469676Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.470392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.478599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.487533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.501662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.516262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.529830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.543579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.558067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.574065Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499622754085136:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.574094Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.574121Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499622754085141:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.574131Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499622754085142:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.574140Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.574772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:58.577440Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499622754085145:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:03:58.668075Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499622754085197:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:58.910939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.918003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.073735Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |56.0%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[join-bush_dis_in_in_in--ForceBlocks] [GOOD] >> KqpIndexLookupJoin::LeftJoinNonPkJoinConditionsWithCast-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> test.py::test[tpch-q21-default.txt-Results] [GOOD] >> test.py::test[tpch-q6-default.txt-Results] >> test.py::test[blocks-interval_mul_scalar--Results] [GOOD] >> test.py::test[column_group-hint_anon_groups-single-Results] [SKIPPED] >> test.py::test[count-count_all_view_concat--Results] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> CommitOffset::DistributedTxCommit_LongReadSession [GOOD] >> TPersQueueMirrorer::TestBasicRemote ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinNonPkJoinConditionsWithCast-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 14396, MsgBus: 16671 2025-11-03T14:03:56.640299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499614436801170:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:56.640332Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e25/r3tmp/tmpJoGzkP/pdisk_1.dat 2025-11-03T14:03:56.672201Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:56.684129Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:56.684309Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499614436801134:2081] 1762178636640031 != 1762178636640034 TServer::EnableGrpc on GrpcPort 14396, node 1 2025-11-03T14:03:56.692372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:56.692387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:56.692389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:56.692441Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:16671 TClient is connected to server localhost:16671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:56.743887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:56.743926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:56.744944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:56.749157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:56.784367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:56.803242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:56.821198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:56.832958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:56.916507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:57.008535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499618731770076:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.008563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.008641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499618731770086:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.008647Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.057851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.066558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.080402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.094235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.108453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.122182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.136351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.153983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.168447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499618731770947:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.168483Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.168491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499618731770952:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.168549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499618731770954:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.168564Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.169377Z node 1 :FLAT_TX_SCHEMESHARD ... ome/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.068705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.082844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.096692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.111015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.124644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.138810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.153816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.162804Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:59.184398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.194753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"a" (OptionalType (DataType 'Int32))) '('"b" (OptionalType (DataType 'Int32)))))) (let $1 (KqpTable '"/Root/A" '"72057594046644480:21" '"" '1)) (let $2 '('"a" '"b")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1197) '('"_id" '"c5de605-c1900042-556757a4-67e115a2") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($35) $35) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"a" $12)) (let $14 (ListType (StructType $13 '('"b" $12)))) (let $15 %kqp%tx_result_binding_0_0) (let $16 '('('"_logical_id" '933) '('"_id" '"72178b91-52dea578-e559b6d2-bc54d317") $4)) (let $17 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $15 (lambda '($36) (Member $36 '"a")) (Void) (Void) (lambda '($37) (OrderedFilter (FlatMap $37 (lambda '($38) (Map (Take (Nth $38 '1) (Uint64 '1)) (lambda '($39) (AsStruct '('"a" (Member $39 '"a"))))))) (lambda '($40) (Exists (Member $40 '"a")))))))) $16)) (let $18 (KqpTable '"/Root/D" '"72057594046644480:24" '"" '1)) (let $19 (KqpCnStreamLookup (TDqOutput $17 '0) $18 $2 (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $20 (OptionalType (DataType 'Int16))) (let $21 (StructType $13 '('"b" $20))) (let $22 '('('"_logical_id" '1015) '('"_id" '"a7286c20-838a0881-f5c034b4-32345bba") '('"_wide_channels" $21))) (let $23 (DqPhyStage '($19) (lambda '($41) (block '( (let $42 (lambda '($44) (block '( (let $45 (Member $44 '"a")) (let $46 (Member $44 '"b")) (return $45 $46 (And (Exists $45) (Exists $46))) )))) (let $43 (lambda '($50 $51 $52) $50 $51)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $41) $42) (lambda '($47 $48 $49) $49)) $43))) ))) $22)) (let $24 (DqCnBroadcast (TDqOutput $23 '0))) (let $25 (StructType '('"A.a" $12) '('"A.b" $12) '('"D.a" $12) '('"D.b" $20))) (let $26 '('('"_logical_id" '1150) '('"_id" '"3e913749-1bf6cd72-605113e7-b0469bd9") '('"_wide_channels" $25))) (let $27 (DqPhyStage '($24) (lambda '($53) (block '( (let $54 '('Many 'Hashed 'Compact)) (let $55 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $53) (lambda '($59 $60) (And (Exists $59) (Exists $60)))) (lambda '($61 $62) (block '( (let $63 (IfPresent $62 (lambda '($64) (Just (SafeCast $64 $11))) (Nothing $12))) (return (IfPresent (FilterNullElements (Just '($61 $63)) '('0 '1)) (lambda '($65) (Just '($65 (AsStruct '('"a" $61) '('"b" $62))))) (Nothing (OptionalType (TupleType (TupleType $11 $11) $21))))) )))) (lambda '($66) (Nth $66 '0)) (lambda '($67) (Nth $67 '1)) $54)) (let $56 (Bool 'true)) (let $57 (Sort (FlatMap $55 (lambda '($68) (block '( (let $69 '('"a" '"A.a" '"b" '"A.b")) (let $70 '('"a" '"D.a" '"b" '"D.b")) (return 
(MapJoinCore (ToFlow $15) $68 '"Left" $2 $2 $69 $70 '('"A.a" '"A.b") '('"D.a" '"D.b"))) )))) '($56 $56) (lambda '($71) '((Member $71 '"A.a") (Member $71 '"A.b"))))) (let $58 (lambda '($72) (Member $72 '"A.a") (Member $72 '"A.b") (Member $72 '"D.a") (Member $72 '"D.b"))) (return (FromFlow (ExpandMap $57 $58))) ))) $26)) (let $28 (DqCnMerge (TDqOutput $27 '0) '('('0 '"Asc") '('1 '"Asc")))) (let $29 (DqPhyStage '($28) (lambda '($73) (FromFlow (NarrowMap (ToFlow $73) (lambda '($74 $75 $76 $77) (AsStruct '('"A.a" $74) '('"A.b" $75) '('"D.a" $76) '('"D.b" $77)))))) '('('"_logical_id" '1162) '('"_id" '"2e779680-f9481635-6a836e79-75ec17d6")))) (let $30 '($17 $23 $27 $29)) (let $31 '('"A.a" '"A.b" '"D.a" '"D.b")) (let $32 (DqCnResult (TDqOutput $29 '0) $31)) (let $33 (KqpTxResultBinding $14 '0 '0)) (let $34 (KqpPhysicalTx $30 '($32) '('($10 $33)) $8)) (return (KqpPhysicalQuery '($9 $34) '((KqpTxResultBinding (ListType $25) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"a" (OptionalType (DataType 'Int32))) '('"b" (OptionalType (DataType 'Int32)))))) (let $1 (KqpTable '"/Root/A" '"72057594046644480:21" '"" '1)) (let $2 '('"a" '"b")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1230) '('"_id" '"9445fd6-8e69acfa-57ecc397-975460b5") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($36) $36) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"a" $12)) (let $14 (ListType (StructType $13 '('"b" $12)))) (let $15 %kqp%tx_result_binding_0_0) (let $16 '('('"_logical_id" '964) '('"_id" '"d8c3a6c-36c58bf1-9fceabe2-cb2669e5") $4)) (let $17 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $15 (lambda '($37) (Member $37 '"a")) (Void) (Void) (lambda '($38) (OrderedFilter (FlatMap $38 (lambda '($39) (Map (Take (Nth $39 '1) (Uint64 '1)) (lambda '($40) (AsStruct '('"a" (Member $40 '"a"))))))) (lambda '($41) (Exists (Member $41 '"a")))))))) $16)) (let $18 (KqpTable '"/Root/D" '"72057594046644480:24" '"" '1)) (let $19 (KqpCnStreamLookup (TDqOutput $17 '0) $18 $2 (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $20 (OptionalType (DataType 'Int16))) (let $21 (StructType $13 '('"b" $20))) (let $22 '('('"_logical_id" '1046) '('"_id" '"160968c0-fd05dfb9-3a9cfe7-309d6508") '('"_wide_channels" $21))) (let $23 (DqPhyStage '($19) (lambda '($42) (block '( (let $43 (lambda '($45) (block '( (let $46 (Member $45 '"a")) (let $47 (Member $45 '"b")) (return $46 $47 (And (Exists $46) (Exists $47))) )))) (let $44 (lambda '($51 $52 $53) $51 $52)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $42) $43) (lambda '($48 $49 $50) $50)) $44))) ))) $22)) (let $24 (DqCnBroadcast (TDqOutput $23 '0))) (let $25 (Uint64 '"1001")) (let $26 (StructType '('"A.a" $12) '('"A.b" $12) '('"D.a" $12) '('"D.b" $20))) (let $27 '('('"_logical_id" '1181) '('"_id" '"1c6e210f-77f86027-b0315b66-cee29a69") '('"_wide_channels" $26))) (let $28 (DqPhyStage '($24) (lambda '($54) (block '( (let $55 '('Many 'Hashed 'Compact)) (let $56 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $54) (lambda '($60 $61) (And (Exists $60) (Exists $61)))) (lambda '($62 $63) (block '( (let $64 (IfPresent $63 (lambda '($65) (Just (SafeCast $65 $11))) (Nothing $12))) (return (IfPresent (FilterNullElements (Just 
'($62 $64)) '('0 '1)) (lambda '($66) (Just '($66 (AsStruct '('"a" $62) '('"b" $63))))) (Nothing (OptionalType (TupleType (TupleType $11 $11) $21))))) )))) (lambda '($67) (Nth $67 '0)) (lambda '($68) (Nth $68 '1)) $55)) (let $57 (Bool 'true)) (let $58 (TopSort (FlatMap $56 (lambda '($69) (block '( (let $70 '('"a" '"A.a" '"b" '"A.b")) (let $71 '('"a" '"D.a" '"b" '"D.b")) (return (MapJoinCore (ToFlow $15) $69 '"Left" $2 $2 $70 $71 '('"A.a" '"A.b") '('"D.a" '"D.b"))) )))) $25 '($57 $57) (lambda '($72) '((Member $72 '"A.a") (Member $72 '"A.b"))))) (let $59 (lambda '($73) (Member $73 '"A.a") (Member $73 '"A.b") (Member $73 '"D.a") (Member $73 '"D.b"))) (return (FromFlow (ExpandMap $58 $59))) ))) $27)) (let $29 (DqCnMerge (TDqOutput $28 '0) '('('0 '"Asc") '('1 '"Asc")))) (let $30 (DqPhyStage '($29) (lambda '($74) (FromFlow (NarrowMap (Take (ToFlow $74) $25) (lambda '($75 $76 $77 $78) (AsStruct '('"A.a" $75) '('"A.b" $76) '('"D.a" $77) '('"D.b" $78)))))) '('('"_logical_id" '1194) '('"_id" '"2999dc6e-456ec2a1-e0a9aa03-1a45bab3")))) (let $31 '($17 $23 $28 $30)) (let $32 '('"A.a" '"A.b" '"D.a" '"D.b")) (let $33 (DqCnResult (TDqOutput $30 '0) $32)) (let $34 (KqpTxResultBinding $14 '0 '0)) (let $35 (KqpPhysicalTx $31 '($33) '('($10 $34)) $8)) (return (KqpPhysicalQuery '($9 $35) '((KqpTxResultBinding (ListType $26) '1 '0)) '('('"type" '"data_query")))) ) |56.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[pg-tpcds-q90-default.txt-Results] [GOOD] |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> test.py::test[produce-process_with_udf_validate_ignore_broken-default.txt-Results] [GOOD] >> test.py::test[ql_filter-integer_escaping--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationOneJoin-EnableSeparationComputeActorsFromRead Test command err: Trying to start YDB, gRPC: 25423, MsgBus: 15507 2025-11-03T14:03:55.756774Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499610103656218:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:55.756812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e27/r3tmp/tmpaBpv3N/pdisk_1.dat 2025-11-03T14:03:55.795300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:55.795761Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:55.795895Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499610103656191:2081] 1762178635756482 != 1762178635756485 TServer::EnableGrpc on GrpcPort 25423, node 1 2025-11-03T14:03:55.801896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:55.801905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:55.801907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:55.801942Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15507 TClient is connected to server localhost:15507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:55.860269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:55.860297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:55.861410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:55.867720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:55.962960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:56.227071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499614398624172:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:56.227066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499614398624163:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:56.227089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:56.227173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499614398624178:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:56.227192Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:56.227838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:56.229611Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499614398624177:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:56.307855Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499614398624230:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:56.366913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:03:56.396115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:56.396166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:56.396220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:56.396241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:56.396260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:56.396278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:56.396298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:56.396317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:56.396337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:56.396360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:56.396387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:56.396414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:56.396433Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499614398624437:2336];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:56.396533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499614398624438:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:56.396553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499614398624438:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:56.396590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499614398624438:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:56.396608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499614398624438:2337];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLAS ... 
tion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:56.430884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:56.430892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:56.430901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:56.430910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:56.430919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:56.431844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:56.431877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:56.431920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:56.431944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:56.431969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:56.431995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:56.432023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:56.432046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:56.432064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:56.432082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:56.432102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:56.432121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:56.432147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499614398624650:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:56.432754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:56.432765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499614398624438:2337];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715660;this=21880562239664;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178636431;max=18446744073709551615;plan=0;src=[1:7568499610103656537:2146];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:03:56.432768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:56.432783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:56.432787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:56.432806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:56.432815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:56.432828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:56.432838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:56.432845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:56.432850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:56.432857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:56.432868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:56.432892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:56.432903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:56.432916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:56.432926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:56.432933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:56.432940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:56.432950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:56.432955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:56.432968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:56.432977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:56.432987Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:56.432995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:56.433148Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FD20DE32AC2 13. ??:0: ?? @ 0x7FD20DEC48BF ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[ypath-direct_read_from_dynamic--Results] [GOOD] Test command err: 127.0.0.1 - - [03/Nov/2025 14:02:41] "GET /foo.txt HTTP/1.1" 200 - 127.0.0.1 - - [03/Nov/2025 14:02:43] "GET /foo.txt HTTP/1.1" 200 - >> TKesusTest::TestAcquireUpgrade >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestKesusConfig >> TKesusTest::TestReleaseLockFailure >> KqpIndexLookupJoin::JoinInclusionTestSemiJoin-StreamLookupJoin [GOOD] >> test.py::test[in-in_enum_single1-default.txt-Results] [GOOD] >> test.py::test[in-in_exists_immediate_nested_subq--Results] [SKIPPED] >> test.py::test[insert-drop_sortness-calc-Results] >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore |56.1%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part12/pytest >> test.py::test[ypath-multi_key-default.txt-Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> 
KqpIndexLookupJoin::JoinInclusionTestSemiJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 12149, MsgBus: 12582 2025-11-03T14:03:57.328459Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499617861406788:2075];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:57.328592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001dfd/r3tmp/tmpjBG8y3/pdisk_1.dat 2025-11-03T14:03:57.359387Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:57.367473Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:57.367641Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499617861406751:2081] 1762178637327357 != 1762178637327360 TServer::EnableGrpc on GrpcPort 12149, node 1 2025-11-03T14:03:57.374317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:57.374334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:57.374335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:57.374376Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12582 TClient is connected to server localhost:12582 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:03:57.437401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:57.437432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:57.438222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:57.438501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-11-03T14:03:57.452356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.469899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.493162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.503602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:57.631101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:57.688647Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617861408397:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.688699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.688758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617861408407:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.688771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.753246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.761130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.773681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.787286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.793784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.801201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.816063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.830253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:57.846305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499617861409269:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.846337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.846374Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617861409274:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.846381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499617861409275:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.846388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:57.847302Z node 1 :FLAT_TX_SCHEMESHARD ... GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.951266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.959167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.971954Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.986435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.000119Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.014164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.028102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.043291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.056791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.082032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.083800Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:00.090535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"a" (OptionalType (DataType 'Int32))) '('"b" (OptionalType (DataType 'Int32)))))) (let $1 (KqpTable '"/Root/A" '"72057594046644480:21" '"" '1)) (let $2 '('"a" '"b")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1522) '('"_id" '"eec98a0b-266d724d-d36739c6-4fae8a68") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($32) $32) $5)) (let $7 (DqCnResult (TDqOutput $6 '"0") '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (OptionalType (DataType 'Int32))) (let $12 '('"b" $11)) (let $13 (ListType (StructType '('"a" $11) $12))) (let $14 %kqp%tx_result_binding_0_0) (let $15 '('('"_logical_id" '1155) '('"_id" '"f3dc6f7e-29ad023c-42d4610d-602663bc") $4)) (let $16 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $14 (lambda '($33) (Member $33 '"b")) (Void) (Void) (lambda '($34) (OrderedFilter (FlatMap $34 (lambda '($35) (Map (Take (Nth $35 '1) (Uint64 '1)) (lambda '($36) (AsStruct '('"b" (Member $36 '"b"))))))) (lambda '($37) (Exists (Member $37 '"b")))))))) $15)) (let $17 (KqpTable '"/Root/B" '"72057594046644480:22" '"" '1)) (let $18 (StructType $12)) (let $19 (KqpCnStreamLookup (TDqOutput $16 '"0") $17 $2 (ListType $18) '('('"Strategy" '"LookupRows")))) (let $20 '('('"_logical_id" '1303) '('"_id" '"b146380-f18d3e27-b205fd61-210f8e65") '('"_wide_channels" $18))) (let $21 (DqPhyStage '($19) (lambda '($38) (block '( (let $39 (lambda '($40) (block '( (let $41 (Member $40 '"b")) (let $42 (Member $40 '"a")) (let $43 (Bool 'false)) (let $44 (And (Exists $41) (Coalesce (> $42 (Int32 '1)) $43) (Coalesce (< $42 (Int32 '"3")) $43))) (return $41 $44) )))) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $38) $39) (lambda '($45 $46) $46)) (lambda '($47 $48) $47)))) ))) $20)) (let $22 (DqCnBroadcast (TDqOutput $21 '"0"))) (let $23 (StructType 
'('"A.a" $11) '('"A.b" $11))) (let $24 '('('"_logical_id" '1474) '('"_id" '"4e9abe17-a880e670-57705b52-391178b1") '('"_wide_channels" $23))) (let $25 (DqPhyStage '($22) (lambda '($49) (block '( (let $50 '('One 'Hashed 'Compact)) (let $51 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $49) (lambda '($55) (Exists $55))) (lambda '($56) $56)) (lambda '($57) $57) (lambda '($58) (Void)) $50)) (let $52 (Bool 'true)) (let $53 (Sort (FlatMap $51 (lambda '($59) (block '( (let $60 '('"b")) (let $61 '('"a" '"A.a" '"b" '"A.b")) (return (MapJoinCore (OrderedFilter (ToFlow $14) (lambda '($62) (Exists (Member $62 '"b")))) $59 '"LeftSemi" $60 $60 $61 '() '('"A.b") '('"B.b"))) )))) '($52 $52) (lambda '($63) '((Member $63 '"A.a") (Member $63 '"A.b"))))) (let $54 (lambda '($64) (Member $64 '"A.a") (Member $64 '"A.b"))) (return (FromFlow (ExpandMap $53 $54))) ))) $24)) (let $26 (DqCnMerge (TDqOutput $25 '"0") '('('"0" '"Asc") '('1 '"Asc")))) (let $27 (DqPhyStage '($26) (lambda '($65) (FromFlow (NarrowMap (ToFlow $65) (lambda '($66 $67) (AsStruct '('"A.a" $66) '('"A.b" $67)))))) '('('"_logical_id" '1486) '('"_id" '"c58848c6-a123c264-68ade1cd-7d1e07b2")))) (let $28 '($16 $21 $25 $27)) (let $29 (DqCnResult (TDqOutput $27 '"0") '('"A.a" '"A.b"))) (let $30 (KqpTxResultBinding $13 '"0" '"0")) (let $31 (KqpPhysicalTx $28 '($29) '('($10 $30)) $8)) (return (KqpPhysicalQuery '($9 $31) '((KqpTxResultBinding (ListType $23) '1 '"0")) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"a" (OptionalType (DataType 'Int32))) '('"b" (OptionalType (DataType 'Int32)))))) (let $1 (KqpTable '"/Root/A" '"72057594046644480:21" '"" '1)) (let $2 '('"a" '"b")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1555) '('"_id" '"a670a1ed-5e50af31-1952969c-436664af") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($33) $33) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (OptionalType (DataType 'Int32))) (let $12 '('"b" $11)) (let $13 (ListType (StructType '('"a" $11) $12))) (let $14 %kqp%tx_result_binding_0_0) (let $15 '('('"_logical_id" '1186) '('"_id" '"6e0a6dee-6405e00d-92523dcd-1c2618aa") $4)) (let $16 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $14 (lambda '($34) (Member $34 '"b")) (Void) (Void) (lambda '($35) (OrderedFilter (FlatMap $35 (lambda '($36) (Map (Take (Nth $36 '1) (Uint64 '1)) (lambda '($37) (AsStruct '('"b" (Member $37 '"b"))))))) (lambda '($38) (Exists (Member $38 '"b")))))))) $15)) (let $17 (KqpTable '"/Root/B" '"72057594046644480:22" '"" '1)) (let $18 (StructType $12)) (let $19 (KqpCnStreamLookup (TDqOutput $16 '0) $17 $2 (ListType $18) '('('"Strategy" '"LookupRows")))) (let $20 '('('"_logical_id" '1334) '('"_id" '"11259b38-9fcaf1dd-807a2746-c3b42d46") '('"_wide_channels" $18))) (let $21 (DqPhyStage '($19) (lambda '($39) (block '( (let $40 (lambda '($41) (block '( (let $42 (Member $41 '"b")) (let $43 (Member $41 '"a")) (let $44 (Bool 'false)) (let $45 (And (Exists $42) (Coalesce (> $43 (Int32 '1)) $44) (Coalesce (< $43 (Int32 '"3")) $44))) (return $42 $45) )))) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $39) $40) (lambda '($46 $47) $47)) (lambda '($48 $49) $48)))) ))) $20)) (let $22 (DqCnBroadcast (TDqOutput $21 '0))) (let $23 (Uint64 '"1001")) (let $24 (StructType '('"A.a" $11) '('"A.b" $11))) (let $25 
'('('"_logical_id" '1505) '('"_id" '"9fdf281d-3eb5190c-81a7f2d0-d2553c08") '('"_wide_channels" $24))) (let $26 (DqPhyStage '($22) (lambda '($50) (block '( (let $51 '('One 'Hashed 'Compact)) (let $52 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $50) (lambda '($56) (Exists $56))) (lambda '($57) $57)) (lambda '($58) $58) (lambda '($59) (Void)) $51)) (let $53 (Bool 'true)) (let $54 (TopSort (FlatMap $52 (lambda '($60) (block '( (let $61 '('"b")) (let $62 '('"a" '"A.a" '"b" '"A.b")) (return (MapJoinCore (OrderedFilter (ToFlow $14) (lambda '($63) (Exists (Member $63 '"b")))) $60 '"LeftSemi" $61 $61 $62 '() '('"A.b") '('"B.b"))) )))) $23 '($53 $53) (lambda '($64) '((Member $64 '"A.a") (Member $64 '"A.b"))))) (let $55 (lambda '($65) (Member $65 '"A.a") (Member $65 '"A.b"))) (return (FromFlow (ExpandMap $54 $55))) ))) $25)) (let $27 (DqCnMerge (TDqOutput $26 '0) '('('0 '"Asc") '('1 '"Asc")))) (let $28 (DqPhyStage '($27) (lambda '($66) (FromFlow (NarrowMap (Take (ToFlow $66) $23) (lambda '($67 $68) (AsStruct '('"A.a" $67) '('"A.b" $68)))))) '('('"_logical_id" '1518) '('"_id" '"13559b1c-df9c13eb-9a414814-960dcaa4")))) (let $29 '($16 $21 $26 $28)) (let $30 (DqCnResult (TDqOutput $28 '0) '('"A.a" '"A.b"))) (let $31 (KqpTxResultBinding $13 '0 '0)) (let $32 (KqpPhysicalTx $29 '($30) '('($10 $31)) $8)) (return (KqpPhysicalQuery '($9 $32) '((KqpTxResultBinding (ListType $24) '1 '0)) '('('"type" '"data_query")))) ) >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 16214, MsgBus: 25680 2025-11-03T14:03:58.312971Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499623275188598:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:58.313268Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e13/r3tmp/tmpm8dDce/pdisk_1.dat 2025-11-03T14:03:58.355930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:58.367219Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:58.367445Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499623275188571:2081] 1762178638312653 != 1762178638312656 TServer::EnableGrpc on GrpcPort 16214, node 1 2025-11-03T14:03:58.374524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:58.374535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:58.374537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:58.374572Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25680 TClient is connected to server localhost:25680 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:58.421175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:58.421198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:58.422350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.429048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.530477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.547601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.571342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:58.580610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.584553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.737728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623275190220:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.737753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.737837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623275190230:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.737850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.778495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.786313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.795042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.809552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.824004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.837461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.851860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.865860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.881965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499623275191092:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.881981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623275191097:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.881985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.882023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623275191100:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.882033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.882526Z node 1 :FLAT_TX_SCHEMESHARD ... s. 2025-11-03T14:03:59.723035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:59.734693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.746183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.770065Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.781480Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.821206Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:00.122040Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499629545440980:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.122070Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.122127Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499629545440990:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.122135Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.132282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.138759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.146210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.153299Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.167539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.181781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.195907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.210468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.228824Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499629545441852:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.228873Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.228932Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499629545441857:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.228946Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499629545441858:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.228980Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:00.229789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:00.237139Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499629545441861:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:04:00.331742Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499629545441913:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:00.551624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.557661Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.566174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.580581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.594431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.651321Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData >> test.py::test[blocks-pg_sort--ForceBlocks] [GOOD] >> test.py::test[blocks-pg_sort--Results] >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease >> CommitOffset::Commit_Flat_WithWrongSession [GOOD] >> CommitOffset::Commit_Flat_WithWrongSession_ToPast >> 
test.py::test[join-nopushdown_filter_over_inner-off-ForceBlocks] [GOOD] >> test.py::test[join-nopushdown_filter_over_inner-off-Results] [SKIPPED] >> test.py::test[join-premap_common_left_cross--ForceBlocks] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK >> TKesusTest::TestAcquireLocks >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7011, MsgBus: 19214 2025-11-03T14:03:57.917591Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499617731920783:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:57.917688Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e19/r3tmp/tmpybrZsC/pdisk_1.dat 2025-11-03T14:03:57.950441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:57.958981Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:57.959165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499617731920753:2081] 1762178637917311 != 1762178637917314 TServer::EnableGrpc on GrpcPort 7011, node 1 2025-11-03T14:03:57.969205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:57.969223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:57.969225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:57.969267Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19214 TClient is connected to server localhost:19214 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.021267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:58.021294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:58.022346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:58.026490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.035961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.051713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.072406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.083823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:58.139137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.313683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622026889694:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.313728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.313816Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622026889704:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.313831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.353940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.362148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.375512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.389407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.403806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.417547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.431294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.445968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.461924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499622026890567:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.461940Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.461981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622026890572:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.462000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622026890573:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.462014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.462613Z node 1 :FLAT_TX_SCHEMESHARD W ... ation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.559544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.573221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.580462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.594402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.608872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.623720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.638448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.663469Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2025-11-03T14:04:00.665911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.674608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($32) (FromFlow (Filter (ToFlow $32) (lambda '($33) (And (Exists (Member $33 '"Fk")) (Coalesce (!= (Member $33 '"Value") (String '"Value1")) (Bool 'false))))))) '('('"_logical_id" '1071) '('"_id" '"1ab43f18-4e97a32e-13ea6e3c-7436a629")))) (let $5 (DqCnUnionAll (TDqOutput $4 '0))) (let $6 (DqPhyStage '($5) (lambda '($34) $34) '('('"_logical_id" '1451) '('"_id" '"eb777f91-8fbe8460-6d9c07b4-83f7f76e")))) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($4 $6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 (StructType '('"Fk" $12) $13 '('"Value" $14))) (let $16 (ListType $15)) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '1114) '('"_id" '"491e884b-b3ed6093-96b95292-d37d0b07") '('"_partition_mode" '"single"))) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($35) (Member $35 '"Fk")) (Void) (Void) (lambda '($36) (Map (Filter (FlatMap $36 (lambda '($37) (Map (Take (Nth $37 '1) (Uint64 '1)) (lambda '($38) (AsStruct '('"Fk" (Member $38 '"Fk"))))))) (lambda '($39) (Exists (Member $39 '"Fk")))) (lambda '($40) (AsStruct '('"Key" (Member $40 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $23 '('('"_logical_id" '1334) '('"_id" '"17afd429-e3abeb6b-e59f99fe-c8596063") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($41) (block '( (let $42 '('Many 'Hashed 'Compact)) (let $43 (SqueezeToDict (FlatMap (ToFlow $17) (lambda '($46) (block '( (let $47 (Member $46 '"Fk")) (let $48 (Nothing (OptionalType (TupleType $11 $15)))) (let $49 (IfPresent $47 (lambda '($50) (Just '($50 $46))) $48)) (return (If (Exists $47) $49 $48)) )))) (lambda '($51) (Nth $51 '0)) (lambda '($52) (Nth $52 '1)) $42)) (let $44 (Sort (FlatMap $43 (lambda '($53) (block '( (let $54 '('"Key" '"r.Key" '"Value" '"r.Value")) (let $55 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" 
'"l.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $41) (lambda '($56) (Exists (Member $56 '"Key")))) $53 '"Inner" '('"Key") '('"Fk") $54 $55 '('"r.Key") '('"l.Fk"))) )))) (Bool 'true) (lambda '($57) (Member $57 '"l.Key")))) (let $45 (lambda '($58) (Member $58 '"l.Fk") (Member $58 '"l.Key") (Member $58 '"l.Value") (Member $58 '"r.Key") (Member $58 '"r.Value"))) (return (FromFlow (ExpandMap $44 $45))) ))) $23)) (let $25 (DqCnMerge (TDqOutput $24 '0) '('('1 '"Asc")))) (let $26 (DqPhyStage '($25) (lambda '($59) (FromFlow (NarrowMap (ToFlow $59) (lambda '($60 $61 $62 $63 $64) (AsStruct '('"l.Fk" $60) '('"l.Key" $61) '('"l.Value" $62) '('"r.Key" $63) '('"r.Value" $64)))))) '('('"_logical_id" '1346) '('"_id" '"1dab6d1f-8caa6fb4-260a525e-261b4d31")))) (let $27 '($19 $24 $26)) (let $28 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $29 (DqCnResult (TDqOutput $26 '0) $28)) (let $30 (KqpTxResultBinding $16 '0 '0)) (let $31 (KqpPhysicalTx $27 '($29) '('($10 $30)) $8)) (return (KqpPhysicalQuery '($9 $31) '((KqpTxResultBinding (ListType $22) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($33) (FromFlow (Filter (ToFlow $33) (lambda '($34) (And (Exists (Member $34 '"Fk")) (Coalesce (!= (Member $34 '"Value") (String '"Value1")) (Bool 'false))))))) '('('"_logical_id" '1102) '('"_id" '"2d06bf28-4c10aab0-ace62275-f77cca91")))) (let $5 (DqCnUnionAll (TDqOutput $4 '0))) (let $6 (DqPhyStage '($5) (lambda '($35) $35) '('('"_logical_id" '1484) '('"_id" '"a8d96989-ec16258c-16a77f03-eecdc682")))) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($4 $6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 (StructType '('"Fk" $12) $13 '('"Value" $14))) (let $16 (ListType $15)) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '1145) '('"_id" '"6577bb65-732a51b4-ff4ab791-87a70dc9") '('"_partition_mode" '"single"))) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($36) (Member $36 '"Fk")) (Void) (Void) (lambda '($37) (Map (Filter (FlatMap $37 (lambda '($38) (Map (Take (Nth $38 '1) (Uint64 '1)) (lambda '($39) (AsStruct '('"Fk" (Member $39 '"Fk"))))))) (lambda '($40) (Exists (Member $40 '"Fk")))) (lambda '($41) (AsStruct '('"Key" (Member $41 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (Uint64 '"1001")) (let $23 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $24 '('('"_logical_id" '1365) '('"_id" '"281e422a-d290c80f-da17ef43-819f2446") '('"_wide_channels" $23))) (let $25 (DqPhyStage '($21) (lambda '($42) (block '( (let $43 '('Many 'Hashed 'Compact)) (let $44 (SqueezeToDict (FlatMap (ToFlow $17) (lambda '($47) (block '( (let $48 (Member $47 '"Fk")) (let $49 (Nothing (OptionalType (TupleType $11 $15)))) (let $50 (IfPresent $48 (lambda 
'($51) (Just '($51 $47))) $49)) (return (If (Exists $48) $50 $49)) )))) (lambda '($52) (Nth $52 '0)) (lambda '($53) (Nth $53 '1)) $43)) (let $45 (TopSort (FlatMap $44 (lambda '($54) (block '( (let $55 '('"Key" '"r.Key" '"Value" '"r.Value")) (let $56 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $42) (lambda '($57) (Exists (Member $57 '"Key")))) $54 '"Inner" '('"Key") '('"Fk") $55 $56 '('"r.Key") '('"l.Fk"))) )))) $22 (Bool 'true) (lambda '($58) (Member $58 '"l.Key")))) (let $46 (lambda '($59) (Member $59 '"l.Fk") (Member $59 '"l.Key") (Member $59 '"l.Value") (Member $59 '"r.Key") (Member $59 '"r.Value"))) (return (FromFlow (ExpandMap $45 $46))) ))) $24)) (let $26 (DqCnMerge (TDqOutput $25 '0) '('('1 '"Asc")))) (let $27 (DqPhyStage '($26) (lambda '($60) (FromFlow (NarrowMap (Take (ToFlow $60) $22) (lambda '($61 $62 $63 $64 $65) (AsStruct '('"l.Fk" $61) '('"l.Key" $62) '('"l.Value" $63) '('"r.Key" $64) '('"r.Value" $65)))))) '('('"_logical_id" '1378) '('"_id" '"fc72c2e1-7d54d78a-c87e3f4f-a31c6e14")))) (let $28 '($19 $25 $27)) (let $29 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $30 (DqCnResult (TDqOutput $27 '0) $29)) (let $31 (KqpTxResultBinding $16 '0 '0)) (let $32 (KqpPhysicalTx $28 '($30) '('($10 $31)) $8)) (return (KqpPhysicalQuery '($9 $32) '((KqpTxResultBinding (ListType $23) '1 '0)) '('('"type" '"data_query")))) ) >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestUnregisterProxy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14600, MsgBus: 27193 2025-11-03T14:03:58.311319Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499623621864007:2073];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:58.311472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e15/r3tmp/tmpsSUpCc/pdisk_1.dat 2025-11-03T14:03:58.350011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:58.352695Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:58.352891Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499623621863963:2081] 1762178638310584 != 1762178638310587 TServer::EnableGrpc on GrpcPort 14600, node 1 2025-11-03T14:03:58.368252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:58.368266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:58.368268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:58.368315Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27193 TClient is connected to server localhost:27193 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.418240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:58.418273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:58.419270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:58.421275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.525173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.545431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.561963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:58.571812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.572211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:58.738601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623621865615:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.738639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.738699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623621865625:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.738709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.797500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.805548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.816307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.829918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.836642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.844167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.858069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.872481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.888534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499623621866488:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.888553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623621866493:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.888554Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.888591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623621866496:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.888600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.889105Z node 1 :FLAT_TX_SCHEMESHARD ... /ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.838724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.845755Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.853158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.867082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.874058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.881349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.896331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.909555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 
2025-11-03T14:04:00.923803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.948635Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:00.958545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.021581Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1118) '('"_id" '"5df48786-46ecc169-7869468d-4b45cc28") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($32) $32) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 (StructType '('"Fk" $12) $13 '('"Value" $14))) (let $16 (ListType $15)) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '852) '('"_id" '"2e1316e0-739903e1-39901ef5-43967afe") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($33) (Member $33 '"Fk")) (Void) (Void) (lambda '($34) (Map (Filter (FlatMap $34 (lambda '($35) (Map (Take (Nth $35 '1) (Uint64 '1)) (lambda '($36) (AsStruct '('"Fk" (Member $36 '"Fk"))))))) (lambda '($37) (Exists (Member $37 '"Fk")))) (lambda '($38) (AsStruct '('"Key" (Member $38 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $23 '('('"_logical_id" '1072) '('"_id" '"a437d779-b912fa83-32c86bff-8dde4f11") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($39) (block '( (let $40 '('Many 'Hashed 'Compact)) (let $41 (SqueezeToDict (FlatMap (ToFlow $17) (lambda '($44) (block '( (let $45 (Member $44 '"Fk")) (let $46 (Nothing (OptionalType (TupleType $11 $15)))) (let $47 (IfPresent $45 (lambda 
'($48) (Just '($48 $44))) $46)) (return (If (Exists $45) $47 $46)) )))) (lambda '($49) (Nth $49 '0)) (lambda '($50) (Nth $50 '1)) $40)) (let $42 (Sort (FlatMap $41 (lambda '($51) (block '( (let $52 '('"Key" '"r.Key" '"Value" '"r.Value")) (let $53 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $39) (lambda '($54) (Exists (Member $54 '"Key")))) $51 '"Inner" '('"Key") '('"Fk") $52 $53 '('"r.Key") '('"l.Fk"))) )))) (Bool 'true) (lambda '($55) (Member $55 '"l.Key")))) (let $43 (lambda '($56) (Member $56 '"l.Fk") (Member $56 '"l.Key") (Member $56 '"l.Value") (Member $56 '"r.Key") (Member $56 '"r.Value"))) (return (FromFlow (ExpandMap $42 $43))) ))) $23)) (let $25 (DqCnMerge (TDqOutput $24 '0) '('('1 '"Asc")))) (let $26 (DqPhyStage '($25) (lambda '($57) (FromFlow (NarrowMap (ToFlow $57) (lambda '($58 $59 $60 $61 $62) (AsStruct '('"l.Fk" $58) '('"l.Key" $59) '('"l.Value" $60) '('"r.Key" $61) '('"r.Value" $62)))))) '('('"_logical_id" '1084) '('"_id" '"accc3632-4543cb0a-1ac56a63-66668a8c")))) (let $27 '($19 $24 $26)) (let $28 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $29 (DqCnResult (TDqOutput $26 '0) $28)) (let $30 (KqpTxResultBinding $16 '0 '0)) (let $31 (KqpPhysicalTx $27 '($29) '('($10 $30)) $8)) (return (KqpPhysicalQuery '($9 $31) '((KqpTxResultBinding (ListType $22) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1151) '('"_id" '"82197157-8b1a5fc2-8f77c084-dc57251f") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($33) $33) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 (StructType '('"Fk" $12) $13 '('"Value" $14))) (let $16 (ListType $15)) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '883) '('"_id" '"3234b2a7-59b2c7e3-45006f9-a474766a") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($34) (Member $34 '"Fk")) (Void) (Void) (lambda '($35) (Map (Filter (FlatMap $35 (lambda '($36) (Map (Take (Nth $36 '1) (Uint64 '1)) (lambda '($37) (AsStruct '('"Fk" (Member $37 '"Fk"))))))) (lambda '($38) (Exists (Member $38 '"Fk")))) (lambda '($39) (AsStruct '('"Key" (Member $39 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (Uint64 '"1001")) (let $23 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $24 '('('"_logical_id" '1103) '('"_id" '"33774559-4f6f7d41-ce366bc-79f79ba5") '('"_wide_channels" $23))) (let $25 (DqPhyStage '($21) (lambda '($40) (block '( (let $41 '('Many 'Hashed 'Compact)) (let $42 (SqueezeToDict (FlatMap (ToFlow $17) (lambda '($45) (block '( (let $46 (Member $45 '"Fk")) (let $47 (Nothing (OptionalType (TupleType $11 $15)))) (let $48 (IfPresent $46 (lambda '($49) 
(Just '($49 $45))) $47)) (return (If (Exists $46) $48 $47)) )))) (lambda '($50) (Nth $50 '0)) (lambda '($51) (Nth $51 '1)) $41)) (let $43 (TopSort (FlatMap $42 (lambda '($52) (block '( (let $53 '('"Key" '"r.Key" '"Value" '"r.Value")) (let $54 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $40) (lambda '($55) (Exists (Member $55 '"Key")))) $52 '"Inner" '('"Key") '('"Fk") $53 $54 '('"r.Key") '('"l.Fk"))) )))) $22 (Bool 'true) (lambda '($56) (Member $56 '"l.Key")))) (let $44 (lambda '($57) (Member $57 '"l.Fk") (Member $57 '"l.Key") (Member $57 '"l.Value") (Member $57 '"r.Key") (Member $57 '"r.Value"))) (return (FromFlow (ExpandMap $43 $44))) ))) $24)) (let $26 (DqCnMerge (TDqOutput $25 '0) '('('1 '"Asc")))) (let $27 (DqPhyStage '($26) (lambda '($58) (FromFlow (NarrowMap (Take (ToFlow $58) $22) (lambda '($59 $60 $61 $62 $63) (AsStruct '('"l.Fk" $59) '('"l.Key" $60) '('"l.Value" $61) '('"r.Key" $62) '('"r.Value" $63)))))) '('('"_logical_id" '1116) '('"_id" '"9e00ea16-96497f25-8588c0d-b3cff75c")))) (let $28 '($19 $25 $27)) (let $29 '('"l.Key" '"l.Fk" '"l.Value" '"r.Key" '"r.Value")) (let $30 (DqCnResult (TDqOutput $27 '0) $29)) (let $31 (KqpTxResultBinding $16 '0 '0)) (let $32 (KqpPhysicalTx $28 '($30) '('($10 $31)) $8)) (return (KqpPhysicalQuery '($9 $32) '((KqpTxResultBinding (ListType $23) '1 '0)) '('('"type" '"data_query")))) ) >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire >> TKesusTest::TestQuoterAccountResourcesBurst >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> TKesusTest::TestAttachOutOfSequence >> TKesusTest::TestAcquireWaiterDowngrade >> test.py::test[aggregate-group_compact_sorted_with_diff_order--Results] [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> test.py::test[aggregate-group_by_expr_alias_on_subexp--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_and_having--Results] >> TKesusTest::TestQuoterResourceDescribe ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH9_100 Test command err: Trying to start YDB, gRPC: 2152, MsgBus: 11873 2025-11-03T14:03:58.029676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499623916087987:2066];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:58.029826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e17/r3tmp/tmphcTDh6/pdisk_1.dat 2025-11-03T14:03:58.088095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:58.088284Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:58.088435Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499623916087961:2081] 1762178638029443 != 1762178638029446 TServer::EnableGrpc on GrpcPort 2152, node 1 2025-11-03T14:03:58.095181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:58.095194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:58.095195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:58.095224Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11873 2025-11-03T14:03:58.133321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:58.133350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:11873 2025-11-03T14:03:58.134421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.143214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.366646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.529480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623916088648:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.529489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623916088637:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.529500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.529580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499623916088652:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.529594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.530399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:58.532443Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499623916088651:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:58.608266Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499623916088704:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:58.660678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:03:58.687821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.687862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.687892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.687908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.687932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.687952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.687971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.687989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:58.688006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:58.688034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.688053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:58.688072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:58.688088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499623916088906:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.688510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499623916088907:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.688546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499623916088907:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.688599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499623916088907:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.688629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499623916088907:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... 
ion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:58.720652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:58.720662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:58.720668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.720674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:58.720679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:58.722226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.722236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499623916088907:2337];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715660;this=124565714078064;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178638721;max=18446744073709551615;plan=0;src=[1:7568499623916088307:2146];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:03:58.722245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.722265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.722282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.722299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.722315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.722333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.722349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:58.722366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:58.722381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.722395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:58.722411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:58.722429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499623916089083:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.722793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:58.722805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:58.722824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:58.722828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:58.722879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:58.722887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:58.722894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:58.722897Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:58.722902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:58.722905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:58.722909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:58.722912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:58.722940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:58.722949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:58.722959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:58.722963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.722967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:58.722971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:58.722974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:58.722977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:58.722986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:58.722988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.722995Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:58.722999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:58.722551Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7F641C342AC2 13. ??:0: ?? @ 0x7F641C3D48BF ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2025-11-03T14:04:01.881307Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.881334Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.884334Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.884388Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.916385Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.916649Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=6183921900953307524, session=0, seqNo=0) 2025-11-03T14:04:01.916699Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:01.927682Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=6183921900953307524, session=1) 2025-11-03T14:04:01.927809Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2163], cookie=11866994520574502125, session=0, seqNo=0) 2025-11-03T14:04:01.927854Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:01.940038Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2163], cookie=11866994520574502125, session=2) 2025-11-03T14:04:01.940345Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:01.940414Z node 1 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:01.940437Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:01.952185Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:01.952294Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=112, session=1, semaphore="Lock2" count=1) 2025-11-03T14:04:01.952336Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-11-03T14:04:01.952352Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-11-03T14:04:01.963224Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=112) 2025-11-03T14:04:01.963305Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2163], cookie=222, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:01.963390Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:137:2162], cookie=333, name="Lock1") 2025-11-03T14:04:01.963399Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-11-03T14:04:01.963405Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-11-03T14:04:01.963419Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:138:2163], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-11-03T14:04:01.974359Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2163], cookie=222) 2025-11-03T14:04:01.974400Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:137:2162], cookie=333) 2025-11-03T14:04:01.974411Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:138:2163], cookie=223) 2025-11-03T14:04:01.974518Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:137:2162], cookie=334, name="Lock2") 2025-11-03T14:04:01.974551Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-11-03T14:04:01.974566Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-11-03T14:04:01.985407Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:137:2162], cookie=334) 2025-11-03T14:04:01.985539Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:168:2190], cookie=14341194922562171605, name="Lock1") 2025-11-03T14:04:01.985555Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:168:2190], 
cookie=14341194922562171605) 2025-11-03T14:04:01.985618Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:171:2193], cookie=7325205646709045991, name="Lock2") 2025-11-03T14:04:01.985625Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:171:2193], cookie=7325205646709045991) 2025-11-03T14:04:01.988070Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.988092Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.988135Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.988240Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.030435Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.030491Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-11-03T14:04:02.030500Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-11-03T14:04:02.030586Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:210:2223], cookie=9433433968389839406, name="Lock1") 2025-11-03T14:04:02.030598Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:210:2223], cookie=9433433968389839406) 2025-11-03T14:04:02.030703Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:218:2230], cookie=8136032700213269083, name="Lock2") 2025-11-03T14:04:02.030709Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:218:2230], cookie=8136032700213269083) 2025-11-03T14:04:02.143890Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.143927Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.148281Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.148512Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.170218Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.170454Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=677814756791540780, session=0, seqNo=0) 2025-11-03T14:04:02.170507Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:02.181341Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=677814756791540780, session=1) 2025-11-03T14:04:02.181437Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:139:2163], cookie=16019476932167435573, session=0, seqNo=0) 2025-11-03T14:04:02.181475Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:02.192307Z node 2 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:139:2163], cookie=16019476932167435573, session=2) 2025-11-03T14:04:02.192534Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:02.192574Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:02.192592Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:02.203624Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=111) 2025-11-03T14:04:02.203729Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=112, session=1, semaphore="Lock2" count=1) 2025-11-03T14:04:02.203775Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-11-03T14:04:02.203792Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-11-03T14:04:02.214805Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=112) 2025-11-03T14:04:02.214949Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=333, session=1, semaphore="Lock1" count=1) 2025-11-03T14:04:02.215040Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=222, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:02.215060Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-11-03T14:04:02.215084Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-11-03T14:04:02.226001Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=333) 2025-11-03T14:04:02.226032Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=222) 2025-11-03T14:04:02.226042Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=223) 2025-11-03T14:04:02.226176Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2187], cookie=1805644495701922300, name="Lock1") 2025-11-03T14:04:02.226201Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2187], cookie=1805644495701922300) 2025-11-03T14:04:02.226270Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2190], cookie=8768027412071086102, name="Lock2") 2025-11-03T14:04:02.226278Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete 
(sender=[2:168:2190], cookie=8768027412071086102) 2025-11-03T14:04:02.226326Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:171:2193], cookie=14241554028186226468, name="Lock1") 2025-11-03T14:04:02.226335Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:171:2193], cookie=14241554028186226468) 2025-11-03T14:04:02.226396Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:174:2196], cookie=13610864598754914440, name="Lock2") 2025-11-03T14:04:02.226404Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:174:2196], cookie=13610864598754914440) 2025-11-03T14:04:02.226441Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=444, session=2, semaphore="Lock2" count=1) 2025-11-03T14:04:02.226482Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-11-03T14:04:02.237350Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=444) 2025-11-03T14:04:02.237504Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:179:2201], cookie=5509822055232664710, name="Lock2") 2025-11-03T14:04:02.237524Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:179:2201], cookie=5509822055232664710) 2025-11-03T14:04:02.237577Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:182:2204], cookie=10666404881986763623, name="Lock2") 2025-11-03T14:04:02.237581Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:182:2204], cookie=10666404881986763623) 2025-11-03T14:04:02.240291Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.240322Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.240368Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.240518Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.282941Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.282990Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:02.283000Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-11-03T14:04:02.283006Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-11-03T14:04:02.283012Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-11-03T14:04:02.283121Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:221:2234], cookie=7331944675297665378, name="Lock1") 
2025-11-03T14:04:02.283144Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:221:2234], cookie=7331944675297665378) 2025-11-03T14:04:02.283268Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:229:2241], cookie=17834252818249488632, name="Lock2") 2025-11-03T14:04:02.283275Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:229:2241], cookie=17834252818249488632) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 9730, MsgBus: 14145 2025-11-03T14:03:59.083533Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499626904972105:2069];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:59.083748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e0f/r3tmp/tmpYTRwpm/pdisk_1.dat 2025-11-03T14:03:59.122394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:59.132847Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:59.133031Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499626904972074:2081] 1762178639082808 != 1762178639082811 TServer::EnableGrpc on GrpcPort 9730, node 1 2025-11-03T14:03:59.141400Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:59.141413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:59.141415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:59.141452Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:14145 TClient is connected to server localhost:14145 2025-11-03T14:03:59.187239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:59.187268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-11-03T14:03:59.188423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:59.195899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:59.197984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:03:59.241638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.260694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.282129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:03:59.294003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:03:59.387615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:59.435510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499626904973729:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.435541Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.435612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499626904973739:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.435625Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.479608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.487153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.494978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.502308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.516361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.531100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.544694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.559119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:59.575728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499626904974601:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.575742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499626904974606:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.575761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.575793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499626904974609:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.575797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Da ... ate_table.cpp:690) 2025-11-03T14:04:01.581925Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.596334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.612747Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.624433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.639497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.653314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.666994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.669278Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:01.693163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:01.701281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1108) '('"_id" '"c701e96e-d23f4638-efcd2d9a-f39fe8f0") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($36) $36) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"generic"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 '('"Value" $14)) (let $16 (ListType (StructType '('"Fk" $12) $13 $15))) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '852) '('"_id" '"d419ed72-152a45a7-e5d7d104-f11b4d0e") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($37) (Member $37 '"Fk")) (Void) (Void) (lambda '($38) (Map (Filter (FlatMap $38 (lambda '($39) (Map (Take (Nth $39 '1) (Uint64 '1)) (lambda '($40) (AsStruct '('"Fk" (Member $40 '"Fk"))))))) (lambda '($41) (Exists (Member $41 '"Fk")))) (lambda '($42) (AsStruct '('"Key" (Member $42 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType $13 $15)) (let $23 '('('"_logical_id" '934) '('"_id" '"f985aa90-995714b1-9b9a9037-5ea4c3da") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($43) (block '( (let $44 (lambda '($46) (block '( (let $47 (Member $46 '"Key")) (return $47 (Member $46 '"Value") (Exists $47)) )))) (let $45 (lambda '($51 $52 $53) $51 $52)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $43) $44) (lambda '($48 $49 $50) $50)) $45))) ))) $23)) (let $25 (DqCnBroadcast (TDqOutput $24 '0))) (let $26 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $27 '('('"_logical_id" '1063) '('"_id" '"ceef6935-ac309f37-b30cb8ba-6ded6789") '('"_wide_channels" $26))) (let $28 (DqPhyStage '($25) (lambda '($54) (block '( (let $55 '('Many 'Hashed 'Compact)) (let $56 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $54) (lambda '($59 $60) (Exists $59))) (lambda '($61 $62) (IfPresent $61 (lambda '($63) (Just '($63 (AsStruct '('"Key" $61) '('"Value" $62))))) (Nothing (OptionalType (TupleType $11 $22)))))) (lambda '($64) (Nth $64 '0)) (lambda '($65) (Nth $65 '1)) $55)) (let $57 (Sort (FlatMap $56 (lambda '($66) (block '( (let $67 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (let $68 '('"Key" '"r.Key" '"Value" '"r.Value")) (return 
(MapJoinCore (ToFlow $17) $66 '"Left" '('"Fk") '('"Key") $67 $68 '('"l.Fk") '('"r.Key"))) )))) (Bool 'true) (lambda '($69) (Member $69 '"l.Key")))) (let $58 (lambda '($70) (Member $70 '"l.Fk") (Member $70 '"l.Key") (Member $70 '"l.Value") (Member $70 '"r.Key") (Member $70 '"r.Value"))) (return (FromFlow (ExpandMap $57 $58))) ))) $27)) (let $29 (DqCnMerge (TDqOutput $28 '0) '('('1 '"Asc")))) (let $30 (DqPhyStage '($29) (lambda '($71) (FromFlow (NarrowMap (ToFlow $71) (lambda '($72 $73 $74 $75 $76) (AsStruct '('"l.Fk" $72) '('"l.Key" $73) '('"l.Value" $74) '('"r.Key" $75) '('"r.Value" $76)))))) '('('"_logical_id" '1075) '('"_id" '"297da441-756a2036-8ef417c8-6ec7320c")))) (let $31 '($19 $24 $28 $30)) (let $32 '('"r.Value" '"l.Key" '"r.Key" '"l.Value" '"l.Fk")) (let $33 (DqCnResult (TDqOutput $30 '0) $32)) (let $34 (KqpTxResultBinding $16 '0 '0)) (let $35 (KqpPhysicalTx $31 '($33) '('($10 $34)) $8)) (return (KqpPhysicalQuery '($9 $35) '((KqpTxResultBinding (ListType $26) '1 '0)) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32))) '('"Key" (OptionalType (DataType 'Int32))) '('"Value" (OptionalType (DataType 'String)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 '('"Fk" '"Key" '"Value")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 '('"_partition_mode" '"single")) (let $5 '('('"_logical_id" '1141) '('"_id" '"7ab46cdd-2dad994e-f105408a-413e3714") $4)) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($37) $37) $5)) (let $7 (DqCnResult (TDqOutput $6 '0) '())) (let $8 '('('"type" '"data"))) (let $9 (KqpPhysicalTx '($6) '($7) '() $8)) (let $10 '"%kqp%tx_result_binding_0_0") (let $11 (DataType 'Int32)) (let $12 (OptionalType $11)) (let $13 '('"Key" $12)) (let $14 (OptionalType (DataType 'String))) (let $15 '('"Value" $14)) (let $16 (ListType (StructType '('"Fk" $12) $13 $15))) (let $17 %kqp%tx_result_binding_0_0) (let $18 '('('"_logical_id" '883) '('"_id" '"e09ca5-218328b2-c31d60ad-d2aba523") $4)) (let $19 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey $17 (lambda '($38) (Member $38 '"Fk")) (Void) (Void) (lambda '($39) (Map (Filter (FlatMap $39 (lambda '($40) (Map (Take (Nth $40 '1) (Uint64 '1)) (lambda '($41) (AsStruct '('"Fk" (Member $41 '"Fk"))))))) (lambda '($42) (Exists (Member $42 '"Fk")))) (lambda '($43) (AsStruct '('"Key" (Member $43 '"Fk"))))))))) $18)) (let $20 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $21 (KqpCnStreamLookup (TDqOutput $19 '0) $20 '('"Key" '"Value") (ListType (StructType $13)) '('('"Strategy" '"LookupRows")))) (let $22 (StructType $13 $15)) (let $23 '('('"_logical_id" '965) '('"_id" '"d64c4317-9f0b1b28-45fad4c9-b604ca6c") '('"_wide_channels" $22))) (let $24 (DqPhyStage '($21) (lambda '($44) (block '( (let $45 (lambda '($47) (block '( (let $48 (Member $47 '"Key")) (return $48 (Member $47 '"Value") (Exists $48)) )))) (let $46 (lambda '($52 $53 $54) $52 $53)) (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow $44) $45) (lambda '($49 $50 $51) $51)) $46))) ))) $23)) (let $25 (DqCnBroadcast (TDqOutput $24 '0))) (let $26 (Uint64 '"1001")) (let $27 (StructType '('"l.Fk" $12) '('"l.Key" $12) '('"l.Value" $14) '('"r.Key" $12) '('"r.Value" $14))) (let $28 '('('"_logical_id" '1094) '('"_id" '"c4dc1971-908c691f-6b08eea3-9aa5bf7b") '('"_wide_channels" $27))) (let $29 (DqPhyStage '($25) (lambda '($55) (block '( (let $56 '('Many 'Hashed 'Compact)) (let $57 (SqueezeToDict (NarrowFlatMap (WideFilter 
(ToFlow $55) (lambda '($60 $61) (Exists $60))) (lambda '($62 $63) (IfPresent $62 (lambda '($64) (Just '($64 (AsStruct '('"Key" $62) '('"Value" $63))))) (Nothing (OptionalType (TupleType $11 $22)))))) (lambda '($65) (Nth $65 '0)) (lambda '($66) (Nth $66 '1)) $56)) (let $58 (TopSort (FlatMap $57 (lambda '($67) (block '( (let $68 '('"Fk" '"l.Fk" '"Key" '"l.Key" '"Value" '"l.Value")) (let $69 '('"Key" '"r.Key" '"Value" '"r.Value")) (return (MapJoinCore (ToFlow $17) $67 '"Left" '('"Fk") '('"Key") $68 $69 '('"l.Fk") '('"r.Key"))) )))) $26 (Bool 'true) (lambda '($70) (Member $70 '"l.Key")))) (let $59 (lambda '($71) (Member $71 '"l.Fk") (Member $71 '"l.Key") (Member $71 '"l.Value") (Member $71 '"r.Key") (Member $71 '"r.Value"))) (return (FromFlow (ExpandMap $58 $59))) ))) $28)) (let $30 (DqCnMerge (TDqOutput $29 '0) '('('1 '"Asc")))) (let $31 (DqPhyStage '($30) (lambda '($72) (FromFlow (NarrowMap (Take (ToFlow $72) $26) (lambda '($73 $74 $75 $76 $77) (AsStruct '('"l.Fk" $73) '('"l.Key" $74) '('"l.Value" $75) '('"r.Key" $76) '('"r.Value" $77)))))) '('('"_logical_id" '1107) '('"_id" '"aaff9618-7434bde0-3b4d9ee8-73d696fd")))) (let $32 '($19 $24 $29 $31)) (let $33 '('"r.Value" '"l.Key" '"r.Key" '"l.Value" '"l.Fk")) (let $34 (DqCnResult (TDqOutput $31 '0) $33)) (let $35 (KqpTxResultBinding $16 '0 '0)) (let $36 (KqpPhysicalTx $32 '($34) '('($10 $35)) $8)) (return (KqpPhysicalQuery '($9 $36) '((KqpTxResultBinding (ListType $27) '1 '0)) '('('"type" '"data_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH10 Test command err: Trying to start YDB, gRPC: 20004, MsgBus: 3714 2025-11-03T14:03:58.236978Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499622064839974:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:58.237127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e16/r3tmp/tmpcznzZ2/pdisk_1.dat 2025-11-03T14:03:58.269009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:58.283327Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:58.283545Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499622064839942:2081] 1762178638236639 != 1762178638236642 TServer::EnableGrpc on GrpcPort 20004, node 1 2025-11-03T14:03:58.288703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:58.288721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:58.288723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:58.288773Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3714 TClient is connected to server localhost:3714 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:58.340312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:58.340337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:58.341466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.348371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.567328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.647454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622064840618:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.647463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622064840629:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.647480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.647532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499622064840632:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.647553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.648200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:58.649762Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499622064840633:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:58.729610Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499622064840685:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:58.768168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:03:58.800407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.800460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.800534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.800539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.800567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.800580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.800597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.800630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.800642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.800678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.800679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.800707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:58.800714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.800749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:58.800757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.800785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7568499622064840899:2338];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.800786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499622064840897:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDepre ... 
ion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:58.848368Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:58.848387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:58.848397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.848408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:58.848417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:58.849788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.849819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.849864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.849893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.849921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.849951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.849983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.850011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:58.850038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:58.850067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.850096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:58.850126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:58.850154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499622064841010:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.850562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499622064840908:2339];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715660;this=124321975697040;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178638849;max=18446744073709551615;plan=0;src=[1:7568499622064840288:2146];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:03:58.850744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:58.850760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:58.850775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:58.850786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:58.850810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:58.850820Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:58.850833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:58.850843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:58.850850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:58.850860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:58.850867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:58.850876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:58.850902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:58.850927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:58.850947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:58.850956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.850963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:58.850973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:58.850980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:58.850989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:58.851006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:58.851016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.851028Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:58.851037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:58.850957Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FC595765AC2 13. ??:0: ?? @ 0x7FC5957F78BF ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore Test command err: Trying to start YDB, gRPC: 5356, MsgBus: 24831 2025-11-03T14:03:58.013307Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499620989113706:2069];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:58.013527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e1a/r3tmp/tmpYZvpSr/pdisk_1.dat 2025-11-03T14:03:58.066764Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:58.066795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:58.067035Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499620989113677:2081] 1762178638012893 != 1762178638012896 TServer::EnableGrpc on GrpcPort 5356, node 1 2025-11-03T14:03:58.073571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:58.073586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:58.073588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:58.073621Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24831 TClient is connected to server localhost:24831 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:03:58.121932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:58.121960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:58.123001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:58.126794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.326041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.386949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499620989114355:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.386951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499620989114366:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.386972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.387039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499620989114370:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.387047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.387699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:58.389151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499620989114369:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:58.482724Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499620989114422:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:58.542345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:03:58.565629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.565688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.565728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.565747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.565765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.565779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.565806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.565826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:58.565840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:58.565854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.565871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499620989114637:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.565875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:58.565889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:58.565892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499620989114637:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.565921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499620989114636:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.565937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499620989114637:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.565962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499620989114637:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... 
ion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:58.598226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:58.598233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:58.598239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.598245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:58.598247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:58.599794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:03:58.599809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:03:58.599829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:03:58.599847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:03:58.599861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:03:58.599877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:03:58.599893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:03:58.599908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:03:58.599927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:03:58.599929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499620989114637:2337];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715660;this=123153759060960;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178638598;max=18446744073709551615;plan=0;src=[1:7568499620989114024:2147];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:03:58.599942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.599954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:03:58.599968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:03:58.599986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7568499620989114804:2351];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.600387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:58.600396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:58.600404Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:58.600407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:58.600428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:58.600482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:58.600498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:58.600502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:58.600508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:58.600512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:58.600517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:58.600524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:58.600539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:58.600546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:58.600554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:58.600561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:58.600565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:58.600568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:58.600572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:58.600576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:58.600584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:58.600589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:58.600599Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:58.600602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:58.600181Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FB5E8B71AC2 13. ??:0: ?? @ 0x7FB5E8C038BF >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> DataShardSnapshots::ShardRestartLockBasic >> TKesusTest::TestAcquireWaiterUpgrade >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> TKesusTest::TestAttachNewSessions >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero >> test.py::test[count-count_all_view_concat--Results] [GOOD] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] >> test.py::test[window-full/session_incompat_sort--Results] [GOOD] >> test.py::test[window-lagging/aggregations--Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore Test command err: Trying to start YDB, gRPC: 5068, MsgBus: 28174 2025-11-03T14:03:57.753312Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499617540983380:2069];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:57.753591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e1b/r3tmp/tmpE3FidQ/pdisk_1.dat 2025-11-03T14:03:57.813451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:57.813467Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles 
were not loaded 2025-11-03T14:03:57.813662Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499617540983351:2081] 1762178637752844 != 1762178637752847 TServer::EnableGrpc on GrpcPort 5068, node 1 2025-11-03T14:03:57.819756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:57.819769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:57.819771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:57.819807Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:28174 TClient is connected to server localhost:28174 2025-11-03T14:03:57.857249Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:57.857271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-11-03T14:03:57.858260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:57.865810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:03:58.080887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:58.093190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499621835951337:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.093199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499621835951328:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.093220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.093296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499621835951343:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.093313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:58.093963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:58.095645Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499621835951342:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:03:58.164185Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499621835951395:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:03:58.198382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.214991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.223369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.235131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.249657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.279166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.285921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.298218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.305018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.318891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.332832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.347372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:58.361165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itsel ... 
-11-03T14:03:59.561720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:59.561731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:59.561739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:59.561757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:59.561765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:59.561774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:59.561783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:59.561789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:59.561798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:59.561803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:59.561807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:59.561826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:59.561834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:59.561845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:59.561853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:59.561858Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:59.561862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:59.561867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:59.561875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:59.561885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:59.561894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:59.561903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:59.561910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:03:59.562003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:03:59.562012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:03:59.562021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:03:59.562025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:03:59.562041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:03:59.562050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:03:59.562059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:03:59.562067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:03:59.562073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:03:59.562078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:03:59.562084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:03:59.562093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:03:59.562113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:03:59.562121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:03:59.562131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:03:59.562139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:03:59.562144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:03:59.562148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:03:59.562153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:03:59.562161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:03:59.562172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:03:59.562180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:03:59.562189Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:03:59.562197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037974;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:03:59.040207Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FA04FC07AC2 13. ??:0: ?? @ 0x7FA04FC998BF >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters >> test.py::test[blocks-pg_sort--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-11-03T14:04:01.144150Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.144187Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.148922Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.149005Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.181068Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.181232Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=7968899657036535342, session=0, seqNo=0) 2025-11-03T14:04:01.181269Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:01.192123Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=7968899657036535342, session=1) 2025-11-03T14:04:01.192224Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute 
(sender=[1:137:2162], cookie=4200355241806412569, session=0, seqNo=0) 2025-11-03T14:04:01.192257Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:01.203131Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=4200355241806412569, session=2) 2025-11-03T14:04:01.203230Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:137:2162], cookie=111, name="Lock1") 2025-11-03T14:04:01.214082Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:01.214175Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:01.214223Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:01.214239Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:01.227113Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2025-11-03T14:04:01.227202Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:137:2162], cookie=333, name="Lock1") 2025-11-03T14:04:01.238095Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:137:2162], cookie=333) 2025-11-03T14:04:01.399762Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.399801Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.404603Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.404792Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.426437Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.426687Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=17652080632707185399, session=0, seqNo=0) 2025-11-03T14:04:01.426738Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:01.437697Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=17652080632707185399, session=1) 2025-11-03T14:04:01.437811Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=16355900026496649122, session=0, seqNo=0) 2025-11-03T14:04:01.437854Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:01.448768Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=16355900026496649122, session=2) 2025-11-03T14:04:01.448946Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:149:2171], 
cookie=18363679241817122050, name="Sem1", limit=1) 2025-11-03T14:04:01.448989Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:01.459993Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:149:2171], cookie=18363679241817122050) 2025-11-03T14:04:01.460156Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=111, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:01.460208Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:01.460259Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-11-03T14:04:01.471243Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=111) 2025-11-03T14:04:01.471277Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=222) 2025-11-03T14:04:01.471442Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2179], cookie=13070180616592465512, name="Sem1") 2025-11-03T14:04:01.471466Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2179], cookie=13070180616592465512) 2025-11-03T14:04:01.471542Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:160:2182], cookie=8386927185568462818, name="Sem1") 2025-11-03T14:04:01.471551Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:160:2182], cookie=8386927185568462818) 2025-11-03T14:04:01.471588Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:138:2162], cookie=333, name="Sem1") 2025-11-03T14:04:01.471622Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-11-03T14:04:01.482520Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:138:2162], cookie=333) 2025-11-03T14:04:01.482694Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2187], cookie=11882484395291168430, name="Sem1") 2025-11-03T14:04:01.482715Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2187], cookie=11882484395291168430) 2025-11-03T14:04:01.482776Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2190], cookie=651112723516273201, name="Sem1") 2025-11-03T14:04:01.482782Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2190], cookie=651112723516273201) 2025-11-03T14:04:01.482816Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:138:2162], cookie=444, name="Sem1") 2025-11-03T14:04:01.482847Z node 2 :KESUS_TABLET 
DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-11-03T14:04:01.494699Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:138:2162], cookie=444) 2025-11-03T14:04:01.494909Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2195], cookie=15208703898427125253, name="Sem1") 2025-11-03T14:04:01.494934Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2195], cookie=15208703898427125253) 2025-11-03T14:04:01.495025Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:176:2198], cookie=13811745123031570430, name="Sem1") 2025-11-03T14:04:01.495032Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:176:2198], cookie=13811745123031570430) 2025-11-03T14:04:01.678157Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.678189Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.682678Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.682750Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.704040Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.704148Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:138:2162], cookie=4085058607543303096, name="Sem1", limit=1) 2025-11-03T14:04:01.704183Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:01.715025Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:138:2162], cookie=4085058607543303096) 2025-11-03T14:04:01.715161Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:147:2169], cookie=17037015486572337767, name="Sem2", limit=1) 2025-11-03T14:04:01.715201Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2025-11-03T14:04:01.726008Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:147:2169], cookie=17037015486572337767) 2025-11-03T14:04:01.726141Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:152:2174], cookie=2071327047923476266, name="Sem1") 2025-11-03T14:04:01.726160Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:152:2174], cookie=2071327047923476266) 2025-11-03T14:04:01.726227Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:155:2177], cookie=5734334161447204042, name="Sem2") 2025-11-03T14:04:01.726234Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:155:2177], cookie=5734334161447204042) 2025-11-03T14:04:01.729005Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 
72057594037927937 2025-11-03T14:04:01.729032Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::E ... TxSemaphoreCreate::Complete (sender=[4:250:2271], cookie=15334139129369174739) 2025-11-03T14:04:02.459625Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=111, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:02.459663Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:02.470369Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=111) 2025-11-03T14:04:02.470482Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-11-03T14:04:02.481508Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=222) 2025-11-03T14:04:02.481607Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2162], cookie=333, name="Sem1") 2025-11-03T14:04:02.481633Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-11-03T14:04:02.502617Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2162], cookie=333) 2025-11-03T14:04:02.502787Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=444, session=2, semaphore="Sem1" count=1) 2025-11-03T14:04:02.513722Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=444) 2025-11-03T14:04:02.513852Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2162], cookie=555, name="Sem1") 2025-11-03T14:04:02.513885Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-11-03T14:04:02.513896Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-11-03T14:04:02.524686Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2162], cookie=555) 2025-11-03T14:04:02.620594Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.620626Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.624215Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.624311Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.656032Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.656221Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=5465857321972266174, session=0, seqNo=0) 2025-11-03T14:04:02.656270Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 
2025-11-03T14:04:02.667018Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=5465857321972266174, session=1) 2025-11-03T14:04:02.667100Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=112, name="Sem1", limit=5) 2025-11-03T14:04:02.667144Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:02.677875Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=112) 2025-11-03T14:04:02.677948Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=113, name="Sem1") 2025-11-03T14:04:02.688779Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=113) 2025-11-03T14:04:02.688859Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=114, name="Sem1", force=0) 2025-11-03T14:04:02.688880Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-11-03T14:04:02.699523Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=114) 2025-11-03T14:04:02.699588Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:137:2162], cookie=5337864863922072744 2025-11-03T14:04:02.699622Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=115, name="Sem1", limit=5) 2025-11-03T14:04:02.710315Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=115) 2025-11-03T14:04:02.710389Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=116, name="Sem1") 2025-11-03T14:04:02.721348Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=116) 2025-11-03T14:04:02.721457Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=117, name="Sem1", force=0) 2025-11-03T14:04:02.732302Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=117) 2025-11-03T14:04:02.732406Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=118, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:02.743319Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=118) 2025-11-03T14:04:02.743413Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=119, name="Sem1") 2025-11-03T14:04:02.754142Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2162], cookie=119) 2025-11-03T14:04:02.754207Z node 5 :KESUS_TABLET DEBUG: 
tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2162], cookie=120, name="Sem1") 2025-11-03T14:04:02.754219Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2162], cookie=120) 2025-11-03T14:04:02.754241Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:137:2162], cookie=14215168775894024558, session=1) 2025-11-03T14:04:02.754254Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:02.765007Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:137:2162], cookie=14215168775894024558) 2025-11-03T14:04:02.765081Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=121, name="Sem1", limit=5) 2025-11-03T14:04:02.775878Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=121) 2025-11-03T14:04:02.775991Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=122, name="Sem1") 2025-11-03T14:04:02.786973Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=122) 2025-11-03T14:04:02.787087Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=123, name="Sem1", force=0) 2025-11-03T14:04:02.798042Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=123) 2025-11-03T14:04:02.798137Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=124, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:02.808982Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=124) 2025-11-03T14:04:02.809088Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=125, name="Sem1") 2025-11-03T14:04:02.830102Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2162], cookie=125) 2025-11-03T14:04:02.830181Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2162], cookie=126, name="Sem1") 2025-11-03T14:04:02.830197Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2162], cookie=126) 2025-11-03T14:04:02.830275Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:137:2162], cookie=127, name="Sem1", limit=5) 2025-11-03T14:04:02.830281Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:137:2162], cookie=127) 2025-11-03T14:04:02.830298Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:137:2162], cookie=128, name="Sem1") 2025-11-03T14:04:02.830305Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: 
[72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:137:2162], cookie=128) 2025-11-03T14:04:02.830322Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:137:2162], cookie=129, name="Sem1", force=0) 2025-11-03T14:04:02.830327Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:137:2162], cookie=129) 2025-11-03T14:04:02.830346Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=130, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:02.830351Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=130) 2025-11-03T14:04:02.830367Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:137:2162], cookie=131, name="Sem1") 2025-11-03T14:04:02.830371Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:137:2162], cookie=131) 2025-11-03T14:04:02.830384Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:137:2162], cookie=132, name="Sem1") 2025-11-03T14:04:02.830387Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:137:2162], cookie=132) |56.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |56.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |56.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease |56.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> TKesusTest::TestAcquireSemaphoreTimeout |56.1%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH16 >> TKesusTest::TestRegisterProxy >> test.py::test[ql_filter-integer_escaping--ForceBlocks] [GOOD] >> test.py::test[ql_filter-integer_escaping--Results] >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestSessionDetach >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate+EnableSeparationComputeActorsFromRead >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> TKesusTest::TestCreateSemaphore [GOOD] >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore >> test.py::test[tpch-q6-default.txt-Results] [GOOD] >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> test.py::test[type_v3-bare_yson--Results] [SKIPPED] >> test.py::test[type_v3-mergejoin_with_sort--Results] >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> TKesusTest::TestAllocatesResources [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-11-03T14:04:00.873444Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:00.873467Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:00.879192Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:00.879306Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:00.911349Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:00.911447Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:137:2162], cookie=15311746243355805670, path="/foo/bar/baz") 2025-11-03T14:04:00.922424Z node 1 
:KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:137:2162], cookie=15311746243355805670, status=SUCCESS) 2025-11-03T14:04:00.922569Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:147:2169], cookie=16757837402630263646) 2025-11-03T14:04:00.933392Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:147:2169], cookie=16757837402630263646) 2025-11-03T14:04:00.933509Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:152:2174], cookie=10997961908373388430, path="/foo/bar/baz") 2025-11-03T14:04:00.944469Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:152:2174], cookie=10997961908373388430, status=SUCCESS) 2025-11-03T14:04:00.944602Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:157:2179], cookie=4356415957456343268) 2025-11-03T14:04:00.955436Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:157:2179], cookie=4356415957456343268) 2025-11-03T14:04:00.957636Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:00.957662Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:00.957706Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:00.957813Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.000326Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.000459Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:198:2211], cookie=7692576781991310283) 2025-11-03T14:04:01.011612Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:198:2211], cookie=7692576781991310283) 2025-11-03T14:04:01.011823Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:206:2218], cookie=5626401720116130161, path="/foo/bar/baz") 2025-11-03T14:04:01.022920Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:206:2218], cookie=5626401720116130161, status=SUCCESS) 2025-11-03T14:04:01.023090Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:211:2223], cookie=17388703447644997862, path="/foo/bar/baz") 2025-11-03T14:04:01.023107Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:211:2223], cookie=17388703447644997862, status=PRECONDITION_FAILED) 2025-11-03T14:04:01.145894Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.145924Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.150834Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.151089Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.172675Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.172808Z node 2 :KESUS_TABLET DEBUG: 
tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:138:2162], cookie=13654517543696842546, name="Lock1") 2025-11-03T14:04:01.172845Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:138:2162], cookie=13654517543696842546) 2025-11-03T14:04:01.415792Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.415823Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.419015Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.419077Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.443239Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.443429Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=13574248276317370618, session=0, seqNo=0) 2025-11-03T14:04:01.443474Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:01.454392Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=13574248276317370618, session=1) 2025-11-03T14:04:01.454509Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:01.454571Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:01.454588Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:01.465467Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=111) 2025-11-03T14:04:01.465650Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:149:2171], cookie=3254825177500354157, name="Lock1", force=0) 2025-11-03T14:04:01.476510Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:149:2171], cookie=3254825177500354157) 2025-11-03T14:04:01.476662Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:154:2176], cookie=5302291953136173965, name="Sem1", force=0) 2025-11-03T14:04:01.487702Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:154:2176], cookie=5302291953136173965) 2025-11-03T14:04:01.487855Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:159:2181], cookie=14189031006307501010, name="Sem1", limit=42) 2025-11-03T14:04:01.487900Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-11-03T14:04:01.498837Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:159:2181], cookie=14189031006307501010) 2025-11-03T14:04:01.499000Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] 
TTxSemaphoreDelete::Execute (sender=[3:164:2186], cookie=16854449002399960416, name="Sem1", force=0) 2025-11-03T14:04:01.499033Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-11-03T14:04:01.510081Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:164:2186], cookie=16854449002399960416) 2025-11-03T14:04:01.510271Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:169:2191], cookie=13938797754669889497, name="Sem1", force=0) 2025-11-03T14:04:01.525884Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:169:2191], cookie=13938797754669889497) 2025-11-03T14:04:01.676786Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.676816Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.680000Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.680035Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.701311Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.701440Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=17066573304127646022, session=0, seqNo=0) 2025-11-03T14:04:01.701472Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:01.722349Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=17066573304127646022, session=1) 2025-11-03T14:04:01.722430Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=14918029412644004100, session=0, seqNo=0) 2025-11-03T14:04:01.722459Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:01.733068Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=14918029412644004100, session=2) 2025-11-03T14:04:01.733135Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:137:2162], cookie=10231766251866024515 2025-11-03T14:04:01.733229Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:150:2172], cookie=2153002299756036641, name="Sem1", limit=3) 2025-11-03T14:04:01.733257Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:01.743958Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:150:2172], cookie=2153002299756036641) 2025-11-03T14:04:01.744050Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=112, name="Sem1") 2025-11-03T14:04:01.744070Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=112) 2025-11-03T14:04:01.744097Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=113, name="Sem1") 2025-11-03T14:04:01.744104Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=113) 2025-11-03T14:04:01.744125Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=18208321313008169312, session=2, seqNo=0) 2025-11-03T14:04:01.756669Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927 ... :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.093887Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.104082Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=129, session=1, semaphore="Sem2" count=2) 2025-11-03T14:04:03.114783Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=129) 2025-11-03T14:04:03.114856Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=130, name="Sem2") 2025-11-03T14:04:03.114870Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=130) 2025-11-03T14:04:03.114907Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=131, session=1, semaphore="Sem2" count=1) 2025-11-03T14:04:03.125448Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=131) 2025-11-03T14:04:03.125510Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=132, name="Sem2") 2025-11-03T14:04:03.125523Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=132) 2025-11-03T14:04:03.125544Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2162], cookie=133, name="Sem2") 2025-11-03T14:04:03.125548Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2162], cookie=133) 2025-11-03T14:04:03.385368Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.385402Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.390041Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.390133Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.422069Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.423531Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=8770346981190683026, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-11-03T14:04:03.423601Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 
2025-11-03T14:04:03.434406Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=8770346981190683026) 2025-11-03T14:04:03.434564Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=4159307905580558465, path="/Root1/Res", config={ }) 2025-11-03T14:04:03.434654Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-11-03T14:04:03.445628Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=4159307905580558465) 2025-11-03T14:04:03.445802Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:152:2174], cookie=14275102470340022583, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-11-03T14:04:03.445858Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2025-11-03T14:04:03.456786Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:152:2174], cookie=14275102470340022583) 2025-11-03T14:04:03.456954Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:157:2179], cookie=17225268451911147880, path="/Root2/Res", config={ }) 2025-11-03T14:04:03.457011Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-11-03T14:04:03.467759Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:157:2179], cookie=17225268451911147880) 2025-11-03T14:04:03.467914Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:162:2184], cookie=1565977941491919150, path="/Root2/Res/Subres", config={ }) 2025-11-03T14:04:03.467978Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-11-03T14:04:03.478973Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:162:2184], cookie=1565977941491919150) 2025-11-03T14:04:03.479375Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:167:2189]. Cookie: 17780373242643925837. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:03.479398Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:167:2189], cookie=17780373242643925837) 2025-11-03T14:04:03.520308Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.571011Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.601460Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.601600Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:175:2193]. Cookie: 11776957917356401111. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-11-03T14:04:03.601702Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:178:2196]. Cookie: 10347547776110729874. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:03.601708Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:178:2196], cookie=10347547776110729874) 2025-11-03T14:04:03.642402Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.683216Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.683447Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:184:2200]. Cookie: 3408537420797776268. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-11-03T14:04:03.683606Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:167:2189]. Cookie: 4949924709069184067. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:03.683616Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:167:2189], cookie=4949924709069184067) 2025-11-03T14:04:03.683712Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:178:2196]. Cookie: 10783551061894961751. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:03.683719Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:178:2196], cookie=10783551061894961751) 2025-11-03T14:04:03.714254Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:167:2189]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.714282Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:178:2196]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.714415Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:191:2207]. Cookie: 10941133622143720219. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |56.1%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build >> OlapEstimationRowsCorrectness::TPCH3 >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TKesusTest::TestSessionDestroy >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource |56.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part2/pytest >> test.py::test[blocks-pg_sort--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2025-11-03T14:04:02.754724Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.754751Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.759901Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.759979Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.791939Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.792132Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=355375603986778289, session=0, seqNo=222) 2025-11-03T14:04:02.792171Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:02.802964Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=355375603986778289, session=1) 2025-11-03T14:04:02.803038Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:138:2163], cookie=10896120502299962717, session=1, seqNo=111) 2025-11-03T14:04:02.813851Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:138:2163], cookie=10896120502299962717, session=1) 2025-11-03T14:04:03.082558Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.082593Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.087581Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.087764Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.109194Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.109334Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=111, session=0, seqNo=42) 
2025-11-03T14:04:03.109364Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.109395Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=222, session=1, seqNo=41) 2025-11-03T14:04:03.120110Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=111, session=1) 2025-11-03T14:04:03.120138Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=222, session=1) 2025-11-03T14:04:03.346927Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.346954Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.350289Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.350356Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.371709Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.371878Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=14704595859673611441, session=0, seqNo=0) 2025-11-03T14:04:03.371923Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.382756Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=14704595859673611441, session=1) 2025-11-03T14:04:03.383086Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:155:2177], cookie=6504892122973890875) 2025-11-03T14:04:03.383106Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:155:2177], cookie=6504892122973890875) 2025-11-03T14:04:03.644929Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.644956Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.648149Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.648178Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.669668Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.902337Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.902367Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.905616Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.905672Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.937322Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.937442Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=3923062868572498719, session=0, seqNo=0) 2025-11-03T14:04:03.937470Z node 5 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.948432Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=3923062868572498719, session=1) 2025-11-03T14:04:03.948558Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:03.948614Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:03.948635Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:03.959484Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=111) 2025-11-03T14:04:03.959687Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:152:2174], cookie=419048027703141810, name="Sem1", limit=42) 2025-11-03T14:04:03.959732Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2025-11-03T14:04:03.970597Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:152:2174], cookie=419048027703141810) 2025-11-03T14:04:03.970764Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:157:2179], cookie=13382863745441667661, name="Sem1", limit=42) 2025-11-03T14:04:03.981663Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:157:2179], cookie=13382863745441667661) 2025-11-03T14:04:03.981812Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:162:2184], cookie=10393044848675579355, name="Sem1", limit=51) 2025-11-03T14:04:03.992662Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:162:2184], cookie=10393044848675579355) 2025-11-03T14:04:03.992819Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:167:2189], cookie=6319015042329934951, name="Lock1", limit=42) 2025-11-03T14:04:04.003838Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:167:2189], cookie=6319015042329934951) 2025-11-03T14:04:04.004003Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:172:2194], cookie=5375153648736538523, name="Lock1", limit=18446744073709551615) 2025-11-03T14:04:04.014987Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:172:2194], cookie=5375153648736538523) 2025-11-03T14:04:04.015172Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:177:2199], cookie=7672158207000437336, name="Sem1") 2025-11-03T14:04:04.015194Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:177:2199], cookie=7672158207000437336) 2025-11-03T14:04:04.015264Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:180:2202], cookie=2414006832801075563, name="Sem2") 2025-11-03T14:04:04.015272Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:180:2202], cookie=2414006832801075563) 2025-11-03T14:04:04.018397Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.018426Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.018491Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.018666Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.061238Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.061295Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:04.061416Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:219:2232], cookie=1001571941945139250, name="Sem1") 2025-11-03T14:04:04.061438Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:219:2232], cookie=1001571941945139250) 2025-11-03T14:04:04.061560Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:226:2238], cookie=11369574827888712441, name="Sem2") 2025-11-03T14:04:04.061570Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:226:2238], cookie=11369574827888712441) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/blobstorage/ut_blobstorage/ut_donor/unittest >> Donor::ConsistentWritesWhenSwitchingToDonorMode [GOOD] Test command err: RandomSeed# 14182541355376112436 Reassign# 4 -- VSlotId { NodeId: 5 PDiskId: 1000 VSlotId: 1000 } GroupId: 2181038080 GroupGeneration: 1 VDiskKind: "Default" FailDomainIdx: 4 VDiskMetrics { SatisfactionRank: 0 VSlotId { NodeId: 5 PDiskId: 1000 VSlotId: 1000 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } Status: "READY" Ready: true Put# [1:1:1:0:0:88:0] Put# [1:1:2:0:0:15:0] Put# [1:1:3:0:0:72:0] Put# [1:1:4:0:0:40:0] 2025-11-03T14:01:01.186614Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: DECISION: [Decision# LostData SubsequentFailure# 0] 2025-11-03T14:01:01.187100Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) TVDiskGuidRecoveryActor: FINISH: [Decision# LostData Guid# 17691872915775028528] 2025-11-03T14:01:01.188177Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:2:0:0:15:1] 2025-11-03T14:01:01.188194Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:3:0:0:72:2] 2025-11-03T14:01:01.188202Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: RESURRECT: id# [1:1:4:0:0:40:2] 2025-11-03T14:01:01.188252Z 9 00h00m20.011024s :BS_SYNCER ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) THullOsirisActor: FINISH: BlobsResurrected# 3 PartsResurrected# 3 Put# [1:1:5:0:0:47:0] 
Put# [1:1:6:0:0:40:0] Put# [1:1:7:0:0:65:0] Put# [1:1:8:0:0:50:0] Put# [1:1:9:0:0:96:0] Put# [1:1:10:0:0:46:0] Put# [1:1:11:0:0:76:0] Put# [1:1:12:0:0:86:0] Put# [1:1:13:0:0:84:0] Put# [1:1:14:0:0:58:0] Put# [1:1:15:0:0:64:0] Put# [1:1:16:0:0:71:0] Put# [1:1:17:0:0:88:0] Put# [1:1:18:0:0:91:0] Put# [1:1:19:0:0:55:0] Put# [1:1:20:0:0:2:0] Put# [1:1:21:0:0:35:0] Put# [1:1:22:0:0:38:0] Put# [1:1:23:0:0:32:0] Put# [1:1:24:0:0:26:0] Put# [1:1:25:0:0:92:0] Put# [1:1:26:0:0:25:0] Put# [1:1:27:0:0:84:0] Put# [1:1:28:0:0:51:0] Put# [1:1:29:0:0:33:0] Put# [1:1:30:0:0:86:0] Put# [1:1:31:0:0:60:0] Put# [1:1:32:0:0:45:0] Put# [1:1:33:0:0:32:0] Put# [1:1:34:0:0:6:0] Put# [1:1:35:0:0:100:0] Put# [1:1:36:0:0:98:0] Put# [1:1:37:0:0:5:0] Put# [1:1:38:0:0:56:0] Put# [1:1:39:0:0:34:0] Put# [1:1:40:0:0:80:0] Put# [1:1:41:0:0:21:0] Put# [1:1:42:0:0:100:0] Put# [1:1:43:0:0:38:0] Put# [1:1:44:0:0:80:0] Put# [1:1:45:0:0:83:0] Put# [1:1:46:0:0:22:0] Put# [1:1:47:0:0:87:0] Put# [1:1:48:0:0:48:0] Put# [1:1:49:0:0:63:0] Put# [1:1:50:0:0:15:0] Put# [1:1:51:0:0:72:0] Put# [1:1:52:0:0:39:0] Put# [1:1:53:0:0:34:0] Put# [1:1:54:0:0:83:0] Put# [1:1:55:0:0:21:0] Put# [1:1:56:0:0:84:0] Put# [1:1:57:0:0:23:0] Put# [1:1:58:0:0:76:0] Put# [1:1:59:0:0:92:0] Put# [1:1:60:0:0:86:0] Put# [1:1:61:0:0:30:0] Put# [1:1:62:0:0:69:0] Put# [1:1:63:0:0:79:0] Put# [1:1:64:0:0:2:0] Put# [1:1:65:0:0:2:0] Put# [1:1:66:0:0:39:0] Put# [1:1:67:0:0:31:0] Put# [1:1:68:0:0:48:0] Put# [1:1:69:0:0:52:0] Put# [1:1:70:0:0:11:0] Put# [1:1:71:0:0:35:0] Put# [1:1:72:0:0:45:0] Put# [1:1:73:0:0:79:0] Put# [1:1:74:0:0:29:0] Put# [1:1:75:0:0:98:0] Put# [1:1:76:0:0:52:0] Put# [1:1:77:0:0:90:0] Put# [1:1:78:0:0:7:0] Put# [1:1:79:0:0:43:0] Put# [1:1:80:0:0:33:0] Put# [1:1:81:0:0:4:0] Put# [1:1:82:0:0:79:0] Put# [1:1:83:0:0:69:0] Put# [1:1:84:0:0:8:0] Put# [1:1:85:0:0:79:0] Put# [1:1:86:0:0:49:0] Put# [1:1:87:0:0:18:0] Put# [1:1:88:0:0:27:0] Put# [1:1:89:0:0:10:0] Put# [1:1:90:0:0:32:0] Put# [1:1:91:0:0:71:0] Put# [1:1:92:0:0:64:0] Put# [1:1:93:0:0:47:0] Put# [1:1:94:0:0:55:0] Put# [1:1:95:0:0:1:0] Put# [1:1:96:0:0:67:0] Put# [1:1:97:0:0:66:0] Put# [1:1:98:0:0:72:0] Put# [1:1:99:0:0:23:0] Put# [1:1:100:0:0:56:0] Put# [1:1:101:0:0:79:0] Put# [1:1:102:0:0:11:0] Put# [1:1:103:0:0:38:0] Put# [1:1:104:0:0:99:0] Put# [1:1:105:0:0:27:0] Put# [1:1:106:0:0:42:0] Put# [1:1:107:0:0:48:0] Put# [1:1:108:0:0:68:0] Put# [1:1:109:0:0:80:0] Put# [1:1:110:0:0:15:0] Put# [1:1:111:0:0:37:0] Put# [1:1:112:0:0:47:0] Put# [1:1:113:0:0:92:0] Put# [1:1:114:0:0:2:0] Put# [1:1:115:0:0:68:0] Put# [1:1:116:0:0:94:0] Put# [1:1:117:0:0:54:0] Put# [1:1:118:0:0:70:0] Put# [1:1:119:0:0:22:0] Put# [1:1:120:0:0:62:0] Put# [1:1:121:0:0:58:0] Put# [1:1:122:0:0:69:0] Put# [1:1:123:0:0:81:0] Put# [1:1:124:0:0:17:0] Put# [1:1:125:0:0:22:0] Put# [1:1:126:0:0:53:0] Put# [1:1:127:0:0:6:0] Put# [1:1:128:0:0:95:0] Put# [1:1:129:0:0:71:0] Put# [1:1:130:0:0:9:0] Put# [1:1:131:0:0:90:0] Put# [1:1:132:0:0:57:0] Put# [1:1:133:0:0:1:0] Put# [1:1:134:0:0:94:0] Put# [1:1:135:0:0:42:0] Put# [1:1:136:0:0:30:0] Put# [1:1:137:0:0:80:0] Put# [1:1:138:0:0:70:0] Put# [1:1:139:0:0:96:0] Put# [1:1:140:0:0:38:0] Put# [1:1:141:0:0:18:0] Put# [1:1:142:0:0:68:0] Put# [1:1:143:0:0:45:0] Put# [1:1:144:0:0:21:0] Put# [1:1:145:0:0:27:0] Put# [1:1:146:0:0:13:0] Put# [1:1:147:0:0:81:0] Put# [1:1:148:0:0:53:0] Put# [1:1:149:0:0:25:0] Put# [1:1:150:0:0:95:0] Put# [1:1:151:0:0:70:0] Put# [1:1:152:0:0:73:0] Put# [1:1:153:0:0:60:0] Put# [1:1:154:0:0:82:0] Put# [1:1:155:0:0:49:0] Put# [1:1:156:0:0:35:0] Put# [1:1:157:0:0:88:0] Put# 
[1:1:158:0:0:48:0] Put# [1:1:159:0:0:22:0] Put# [1:1:160:0:0:40:0] Put# [1:1:161:0:0:48:0] Put# [1:1:162:0:0:24:0] Put# [1:1:163:0:0:22:0] Put# [1:1:164:0:0:68:0] Put# [1:1:165:0:0:71:0] Put# [1:1:166:0:0:90:0] Put# [1:1:167:0:0:6:0] Put# [1:1:168:0:0:35:0] Put# [1:1:169:0:0:24:0] Put# [1:1:170:0:0:3:0] Put# [1:1:171:0:0:72:0] Put# [1:1:172:0:0:40:0] Put# [1:1:173:0:0:43:0] Put# [1:1:174:0:0:87:0] Put# [1:1:175:0:0:90:0] Put# [1:1:176:0:0:57:0] Put# [1:1:177:0:0:63:0] Put# [1:1:178:0:0:10:0] Put# [1:1:179:0:0:64:0] Put# [1:1:180:0:0:100:0] Put# [1:1:181:0:0:65:0] Put# [1:1:182:0:0:86:0] Put# [1:1:183:0:0:65:0] Put# [1:1:184:0:0:38:0] Put# [1:1:185:0:0:69:0] Put# [1:1:186:0:0:85:0] Put# [1:1:187:0:0:32:0] Put# [1:1:188:0:0:33:0] Put# [1:1:189:0:0:68:0] Put# [1:1:190:0:0:91:0] Put# [1:1:191:0:0:96:0] Put# [1:1:192:0:0:87:0] Put# [1:1:193:0:0:65:0] Put# [1:1:194:0:0:32:0] Put# [1:1:195:0:0:78:0] Put# [1:1:196:0:0:18:0] Put# [1:1:197:0:0:31:0] Put# [1:1:198:0:0:21:0] Put# [1:1:199:0:0:29:0] Put# [1:1:200:0:0:17:0] Put# [1:1:201:0:0:67:0] Put# [1:1:202:0:0:24:0] Put# [1:1:203:0:0:99:0] Put# [1:1:204:0:0:53:0] Put# [1:1:205:0:0:7:0] Put# [1:1:206:0:0:91:0] Put# [1:1:207:0:0:2:0] Put# [1:1:208:0:0:43:0] Put# [1:1:209:0:0:41:0] Put# [1:1:210:0:0:95:0] Put# [1:1:211:0:0:94:0] Put# [1:1:212:0:0:7:0] Put# [1:1:213:0:0:25:0] Put# [1:1:214:0:0:75:0] Put# [1:1:215:0:0:10:0] Put# [1:1:216:0:0:81:0] Put# [1:1:217:0:0:42:0] Put# [1:1:218:0:0:6:0] Put# [1:1:219:0:0:10:0] Put# [1:1:220:0:0:8:0] Put# [1:1:221:0:0:44:0] Put# [1:1:222:0:0:93:0] Put# [1:1:223:0:0:56:0] Put# [1:1:224:0:0:36:0] Put# [1:1:225:0:0:48:0] Put# [1:1:226:0:0:22:0] Put# [1:1:227:0:0:63:0] Put# [1:1:228:0:0:37:0] Put# [1:1:229:0:0:60:0] Put# [1:1:230:0:0:69:0] Put# [1:1:231:0:0:18:0] Put# [1:1:232:0:0:69:0] Put# [1:1:233:0:0:31:0] Put# [1:1:234:0:0:69:0] Put# [1:1:235:0:0:80:0] Put# [1:1:236:0:0:43:0] Put# [1:1:237:0:0:93:0] Put# [1:1:238:0:0:17:0] Put# [1:1:239:0:0:16:0] Put# [1:1:240:0:0:25:0] Put# [1:1:241:0:0:61:0] Put# [1:1:242:0:0:92:0] Put# [1:1:243:0:0:66:0] Put# [1:1:244:0:0:27:0] Put# [1:1:245:0:0:31:0] Put# [1:1:246:0:0:19:0] Put# [1:1:247:0:0:52:0] Put# [1:1:248:0:0:49:0] Put# [1:1:249:0:0:70:0] Put# [1:1:250:0:0:98:0] Put# [1:1:251:0:0:91:0] Put# [1:1:252:0:0:9:0] Put# [1:1:253:0:0:36:0] Put# [1:1:254:0:0:59:0] Put# [1:1:255:0:0:71:0] Put# [1:1:256:0:0:9:0] Put# [1:1:257:0:0:58:0] Put# [1:1:258:0:0:11:0] Put# [1:1:259:0:0:38:0] Put# [1:1:260:0:0:49:0] Put# [1:1:261:0:0:45:0] Put# [1:1:262:0:0:71:0] Put# [1:1:263:0:0:79:0] Put# [1:1:264:0:0:75:0] Put# [1:1:265:0:0:51:0] Put# [1:1:266:0:0:40:0] Put# [1:1:267:0:0:12:0] Put# [1:1:268:0:0:22:0] Put# [1:1:269:0:0:70:0] Put# [1:1:270:0:0:26:0] Put# [1:1:271:0:0:80:0] Put# [1:1:272:0:0:1:0] Put# [1:1:273:0:0:77:0] Put# [1:1:274:0:0:95:0] Put# [1:1:275:0:0:26:0] Put# [1:1:276:0:0:2:0] Put# [1:1:277:0:0:51:0] Put# [1:1:278:0:0:73:0] Put# [1:1:279:0:0:21:0] Put# [1:1:280:0:0:99:0] Put# [1:1:281:0:0:21:0] Put# [1:1:282:0:0:69:0] Put# [1:1:283:0:0:6:0] Put# [1:1:284:0:0:83:0] Put# [1:1:285:0:0:73:0] Put# [1:1:286:0:0:34:0] Put# [1:1:287:0:0:70:0] Put# [1:1:288:0:0:46:0] Put# [1:1:289:0:0:86:0] Put# [1:1:290:0:0:3:0] Put# [1:1:291:0:0:31:0] Put# [1:1:292:0:0:52:0] Put# [1:1:293:0:0:52:0] Put# [1:1:294:0:0:23:0] Put# [1:1:295:0:0:71:0] Put# [1:1:296:0:0:83:0] Put# [1:1:297:0:0:69:0] Put# [1:1:298:0:0:18:0] Put# [1:1:299:0:0:68:0] Put# [1:1:300:0:0:4:0] Put# [1:1:301:0:0:26:0] Put# [1:1:302:0:0:57:0] Put# [1:1:303:0:0:41:0] Put# [1:1:304:0:0:70:0] Put# [1:1:305:0:0:92:0] Put# 
[1:1:306:0:0:94:0] Put# [1:1:307:0:0:4:0] Put# [1:1:308:0:0:41:0] Put# [1:1:309:0:0:7:0] Put# [1:1:310:0:0:6:0] Put# [1:1:311:0:0:38:0] Put# [1:1:312:0:0:68:0] Put# [1:1:313:0:0:35:0] Put# [1:1:314:0:0:87:0] Put# [1:1:315:0:0:96:0] Put# [1:1:316:0:0:6:0] Put# [1:1:317:0:0:69:0] Put# [1:1:318:0:0:58:0] Put# [1:1:319:0:0:25:0] Put# [1:1:320:0:0:13:0] Put# [1:1:321:0:0:64:0] Put# [1:1:322:0:0:85:0] Put# [1:1:323:0:0:64:0] Put# [1:1:324:0:0:60:0] Put# [1:1:325:0:0:23:0] Put# [1:1:326:0:0:92:0] Put# [1:1:327:0:0:94:0] Put# [1:1:328:0:0:62:0] Put# [1:1:329:0:0:75:0] Put# [1:1:330:0:0:94:0] Put# [1:1:331:0:0:15:0] Put# [1:1:332:0:0:46:0] Put# [1:1:333:0:0:35:0] Put# [1:1:334:0:0:48:0] Put# [1:1:335:0:0:78:0] Put# [1:1:336:0:0:19:0] Put# [1:1:337:0:0:55:0] Put# [1:1:338:0:0:50:0] Put# [1:1:339:0:0:65:0] Put# [1:1:340:0:0:30:0] Put# [1:1:341:0:0:17:0] Put# [1:1:342:0:0:5:0] Put# [1:1:343:0:0:72:0] Put# [1:1:344:0:0:23:0] Put# [1:1:345:0:0:77:0] Put# [1:1:346:0:0:20:0] Put# [1:1:347:0:0:37:0] Put# [1:1:348:0:0:49:0] Put# [1:1:349:0:0:62:0] Put# [1:1:350:0:0:13:0] Put# [1:1:351:0:0:73:0] Put# [1:1:352:0:0:57:0] Put# [1:1:353:0:0:57:0] Put# [1:1:354:0:0:82:0] Put# [1:1:355:0:0:92:0] Put# [1:1:356:0:0:15:0] Put# [1:1:357:0:0:96:0] Put# [1:1:358:0:0:2:0] Put# [1:1:359:0:0:11:0] Put# [1:1:360:0:0:1:0] Put# [1:1:361:0:0:40:0] Put# [1:1:362:0:0:78:0] Put# [1:1:363:0:0:1:0] Put# [1:1:364:0:0:89:0] Put# [1:1:365:0:0:25:0] Put# [1:1:366:0:0:87:0] Put# [1:1:367:0:0:62:0] Put# [1:1:368:0:0:81:0] Put# [1:1:369:0:0:89:0] Put# [1:1:370:0:0:22:0] Put# [1:1:371:0:0:41:0] Put# [1:1:372:0:0:60:0] Put# [1:1:373:0:0:23:0] Put# [1:1:374:0:0:60:0] Put# [1:1:375:0:0:20:0] Put# [1:1:376:0:0:6:0] Put# [1:1:377:0:0:86:0] Put# [1:1:378:0:0:28:0] Put# [1:1:379:0:0:85:0] Put# [1:1:380:0:0:82:0] Put# [1:1:381:0:0:11:0] Put# [1:1:382:0:0:79:0] Put# [1:1:383:0:0:8:0] Put# [1:1:384:0:0:98:0] Put# [1:1:385:0:0:19:0] Put# [1:1:386:0:0:35:0] Put# [1:1:387:0:0:52:0] Put# [1:1:388:0:0:99:0] Put# [1:1:389:0:0:6:0] Put# [1:1:390:0:0:8:0] Put# [1:1:391:0:0:6:0] Put# [1:1:392:0:0:10:0] Put# [1:1:393:0:0:31:0] Put# [1:1:394:0:0:78:0] Put# [1:1:395:0:0:45:0] Put# [1:1:396:0:0:49:0] Put# [1:1:397:0:0:41:0] Put# [1:1:398:0:0:9:0] Put# [1:1:399:0:0:1:0] Put# [1:1:400:0:0:48:0] Put# [1:1:401:0:0:20:0] Put# [1:1:402:0:0:32:0] Put# [1:1:403:0:0:18:0] Put# [1:1:404:0:0:58:0] Put# [1:1:405:0:0:26:0] Put# [1:1:406:0:0:54:0] Put# [1:1:407:0:0:22:0] Put# [1:1:408:0:0:77:0] Put# [1:1:409:0:0:65:0] Put# [1:1:410:0:0:91:0] Put# [1:1:411:0:0:64:0] Put# [1:1:412:0:0:75:0] Put# [1:1:413:0:0:36:0] Put# [1:1:414:0:0:7:0] Put# [1:1:415:0:0:74:0] Put# [1:1:416:0:0:89:0] Put# [1:1:417:0:0:7:0] Put# [1:1:418:0:0:39:0] Put# [1:1:419:0:0:25:0] Put# [1:1:420:0:0:96:0] Put# [1:1:421:0:0:91:0] Put# [1:1:422:0:0:36:0] Put# [1:1:423:0:0:40:0] Put# [1:1:424:0:0:24:0] Put# [1:1:425:0:0:65:0] Put# [1:1:426:0:0:1:0] Put# [1:1:427:0:0:25:0] Put# [1:1:428:0:0:6:0] Put# [1:1:429:0:0:11:0] Put# [1:1:430:0:0:17:0] Put# [1:1:431:0:0:32:0] Put# [1:1:432:0:0:54:0] Put# [1:1:433:0:0:36:0] Put# [1:1:434:0:0:70:0] Put# [1:1:435:0:0:88:0] Put# [1:1:436:0:0:18:0] Put# [1:1:437:0:0:40:0] Put# [1:1:438:0:0:69:0] Put# [1:1:439:0:0:3:0] Put# [1:1:440:0:0:93:0] Put# [1:1:441:0:0:26:0] Put# [1:1:442:0:0:43:0] Put# [1:1:443:0:0:15:0] Put# [1:1:444:0:0:4:0] Put# [1:1:445:0:0:85:0] Put# [1:1:446:0:0:23: ... 
[1:19:9538:0:0:25:0] Put# [1:19:9539:0:0:27:0] Put# [1:19:9540:0:0:30:0] Put# [1:19:9541:0:0:40:0] Put# [1:19:9542:0:0:18:0] Put# [1:19:9543:0:0:40:0] Put# [1:19:9544:0:0:22:0] Put# [1:19:9545:0:0:33:0] Put# [1:19:9546:0:0:23:0] Put# [1:19:9547:0:0:12:0] Put# [1:19:9548:0:0:8:0] Put# [1:19:9549:0:0:68:0] Put# [1:19:9550:0:0:70:0] Put# [1:19:9551:0:0:78:0] Put# [1:19:9552:0:0:100:0] Put# [1:19:9553:0:0:49:0] Put# [1:19:9554:0:0:72:0] Put# [1:19:9555:0:0:100:0] Put# [1:19:9556:0:0:31:0] Put# [1:19:9557:0:0:78:0] Put# [1:19:9558:0:0:99:0] Put# [1:19:9559:0:0:37:0] Put# [1:19:9560:0:0:88:0] Put# [1:19:9561:0:0:38:0] Put# [1:19:9562:0:0:26:0] Put# [1:19:9563:0:0:20:0] Put# [1:19:9564:0:0:44:0] Put# [1:19:9565:0:0:97:0] Put# [1:19:9566:0:0:46:0] Put# [1:19:9567:0:0:56:0] Put# [1:19:9568:0:0:99:0] Put# [1:19:9569:0:0:49:0] Put# [1:19:9570:0:0:88:0] Put# [1:19:9571:0:0:17:0] Put# [1:19:9572:0:0:81:0] Put# [1:19:9573:0:0:70:0] Put# [1:19:9574:0:0:27:0] Put# [1:19:9575:0:0:71:0] Put# [1:19:9576:0:0:40:0] Put# [1:19:9577:0:0:49:0] Put# [1:19:9578:0:0:49:0] Put# [1:19:9579:0:0:98:0] Put# [1:19:9580:0:0:5:0] Put# [1:19:9581:0:0:77:0] Put# [1:19:9582:0:0:90:0] Put# [1:19:9583:0:0:34:0] Put# [1:19:9584:0:0:41:0] Put# [1:19:9585:0:0:23:0] Put# [1:19:9586:0:0:34:0] Put# [1:19:9587:0:0:61:0] Put# [1:19:9588:0:0:4:0] Put# [1:19:9589:0:0:87:0] Put# [1:19:9590:0:0:24:0] Put# [1:19:9591:0:0:2:0] Put# [1:19:9592:0:0:74:0] Put# [1:19:9593:0:0:13:0] Put# [1:19:9594:0:0:41:0] Put# [1:19:9595:0:0:48:0] Put# [1:19:9596:0:0:97:0] Put# [1:19:9597:0:0:20:0] Put# [1:19:9598:0:0:78:0] Put# [1:19:9599:0:0:23:0] Put# [1:19:9600:0:0:93:0] Put# [1:19:9601:0:0:39:0] Put# [1:19:9602:0:0:79:0] Put# [1:19:9603:0:0:28:0] Put# [1:19:9604:0:0:7:0] Put# [1:19:9605:0:0:95:0] Put# [1:19:9606:0:0:93:0] Put# [1:19:9607:0:0:95:0] Put# [1:19:9608:0:0:36:0] Put# [1:19:9609:0:0:34:0] Put# [1:19:9610:0:0:96:0] Put# [1:19:9611:0:0:93:0] Put# [1:19:9612:0:0:34:0] Put# [1:19:9613:0:0:57:0] Put# [1:19:9614:0:0:84:0] Put# [1:19:9615:0:0:46:0] Put# [1:19:9616:0:0:41:0] Put# [1:19:9617:0:0:19:0] Put# [1:19:9618:0:0:8:0] Put# [1:19:9619:0:0:77:0] Put# [1:19:9620:0:0:27:0] Put# [1:19:9621:0:0:32:0] Put# [1:19:9622:0:0:86:0] Put# [1:19:9623:0:0:66:0] Put# [1:19:9624:0:0:53:0] Put# [1:19:9625:0:0:9:0] Put# [1:19:9626:0:0:23:0] Put# [1:19:9627:0:0:18:0] Put# [1:19:9628:0:0:86:0] Put# [1:19:9629:0:0:87:0] Put# [1:19:9630:0:0:78:0] Put# [1:19:9631:0:0:58:0] Put# [1:19:9632:0:0:67:0] Put# [1:19:9633:0:0:21:0] Put# [1:19:9634:0:0:58:0] Put# [1:19:9635:0:0:99:0] Put# [1:19:9636:0:0:53:0] Put# [1:19:9637:0:0:83:0] Put# [1:19:9638:0:0:27:0] Put# [1:19:9639:0:0:75:0] Put# [1:19:9640:0:0:94:0] Put# [1:19:9641:0:0:50:0] Put# [1:19:9642:0:0:31:0] Put# [1:19:9643:0:0:29:0] Put# [1:19:9644:0:0:41:0] Put# [1:19:9645:0:0:64:0] Put# [1:19:9646:0:0:52:0] Put# [1:19:9647:0:0:60:0] Put# [1:19:9648:0:0:44:0] Put# [1:19:9649:0:0:47:0] Put# [1:19:9650:0:0:90:0] Put# [1:19:9651:0:0:72:0] Put# [1:19:9652:0:0:19:0] Put# [1:19:9653:0:0:69:0] Put# [1:19:9654:0:0:49:0] Put# [1:19:9655:0:0:51:0] Put# [1:19:9656:0:0:29:0] Put# [1:19:9657:0:0:37:0] Put# [1:19:9658:0:0:19:0] Put# [1:19:9659:0:0:77:0] Put# [1:19:9660:0:0:51:0] Put# [1:19:9661:0:0:83:0] Put# [1:19:9662:0:0:76:0] Put# [1:19:9663:0:0:76:0] Put# [1:19:9664:0:0:14:0] Put# [1:19:9665:0:0:52:0] Put# [1:19:9666:0:0:59:0] Put# [1:19:9667:0:0:29:0] Put# [1:19:9668:0:0:34:0] Put# [1:19:9669:0:0:20:0] Put# [1:19:9670:0:0:3:0] Put# [1:19:9671:0:0:62:0] Put# [1:19:9672:0:0:84:0] Put# [1:19:9673:0:0:32:0] Put# [1:19:9674:0:0:26:0] 
Put# [1:19:9675:0:0:56:0] Put# [1:19:9676:0:0:49:0] Put# [1:19:9677:0:0:64:0] Put# [1:19:9678:0:0:56:0] Put# [1:19:9679:0:0:3:0] Put# [1:19:9680:0:0:84:0] Put# [1:19:9681:0:0:96:0] Put# [1:19:9682:0:0:5:0] Put# [1:19:9683:0:0:84:0] Put# [1:19:9684:0:0:4:0] Put# [1:19:9685:0:0:75:0] Put# [1:19:9686:0:0:43:0] Put# [1:19:9687:0:0:37:0] Put# [1:19:9688:0:0:54:0] Put# [1:19:9689:0:0:81:0] Put# [1:19:9690:0:0:25:0] Put# [1:19:9691:0:0:91:0] Put# [1:19:9692:0:0:11:0] Put# [1:19:9693:0:0:92:0] Put# [1:19:9694:0:0:91:0] Put# [1:19:9695:0:0:3:0] Put# [1:19:9696:0:0:82:0] Put# [1:19:9697:0:0:29:0] Put# [1:19:9698:0:0:50:0] Put# [1:19:9699:0:0:11:0] Put# [1:19:9700:0:0:55:0] Put# [1:19:9701:0:0:85:0] Put# [1:19:9702:0:0:29:0] Put# [1:19:9703:0:0:73:0] Put# [1:19:9704:0:0:67:0] Put# [1:19:9705:0:0:11:0] Put# [1:19:9706:0:0:15:0] Put# [1:19:9707:0:0:58:0] Put# [1:19:9708:0:0:97:0] Put# [1:19:9709:0:0:50:0] Put# [1:19:9710:0:0:76:0] Put# [1:19:9711:0:0:99:0] Put# [1:19:9712:0:0:64:0] Put# [1:19:9713:0:0:51:0] Put# [1:19:9714:0:0:27:0] Put# [1:19:9715:0:0:49:0] Put# [1:19:9716:0:0:79:0] Put# [1:19:9717:0:0:35:0] Put# [1:19:9718:0:0:43:0] Put# [1:19:9719:0:0:31:0] Put# [1:19:9720:0:0:76:0] Put# [1:19:9721:0:0:92:0] Put# [1:19:9722:0:0:93:0] Put# [1:19:9723:0:0:64:0] Put# [1:19:9724:0:0:82:0] Put# [1:19:9725:0:0:71:0] Put# [1:19:9726:0:0:80:0] Put# [1:19:9727:0:0:72:0] Put# [1:19:9728:0:0:33:0] Put# [1:19:9729:0:0:35:0] Put# [1:19:9730:0:0:87:0] Put# [1:19:9731:0:0:56:0] Put# [1:19:9732:0:0:42:0] Put# [1:19:9733:0:0:19:0] Put# [1:19:9734:0:0:27:0] Put# [1:19:9735:0:0:27:0] Put# [1:19:9736:0:0:34:0] Put# [1:19:9737:0:0:31:0] Put# [1:19:9738:0:0:11:0] Put# [1:19:9739:0:0:32:0] Put# [1:19:9740:0:0:19:0] Put# [1:19:9741:0:0:37:0] Put# [1:19:9742:0:0:94:0] Put# [1:19:9743:0:0:64:0] Put# [1:19:9744:0:0:18:0] Put# [1:19:9745:0:0:83:0] Put# [1:19:9746:0:0:94:0] Put# [1:19:9747:0:0:22:0] Put# [1:19:9748:0:0:14:0] Put# [1:19:9749:0:0:63:0] Put# [1:19:9750:0:0:14:0] Put# [1:19:9751:0:0:52:0] Put# [1:19:9752:0:0:59:0] Put# [1:19:9753:0:0:62:0] Put# [1:19:9754:0:0:66:0] Put# [1:19:9755:0:0:1:0] Put# [1:19:9756:0:0:32:0] Put# [1:19:9757:0:0:78:0] Put# [1:19:9758:0:0:65:0] Put# [1:19:9759:0:0:21:0] Put# [1:19:9760:0:0:9:0] Put# [1:19:9761:0:0:84:0] Put# [1:19:9762:0:0:58:0] Put# [1:19:9763:0:0:54:0] Put# [1:19:9764:0:0:78:0] Put# [1:19:9765:0:0:9:0] Put# [1:19:9766:0:0:77:0] Put# [1:19:9767:0:0:88:0] Put# [1:19:9768:0:0:82:0] Put# [1:19:9769:0:0:40:0] Put# [1:19:9770:0:0:67:0] Put# [1:19:9771:0:0:55:0] Put# [1:19:9772:0:0:83:0] Put# [1:19:9773:0:0:81:0] Put# [1:19:9774:0:0:8:0] Put# [1:19:9775:0:0:51:0] Put# [1:19:9776:0:0:23:0] Put# [1:19:9777:0:0:28:0] Put# [1:19:9778:0:0:24:0] Put# [1:19:9779:0:0:30:0] Put# [1:19:9780:0:0:25:0] Put# [1:19:9781:0:0:69:0] Put# [1:19:9782:0:0:39:0] Put# [1:19:9783:0:0:84:0] Put# [1:19:9784:0:0:42:0] Put# [1:19:9785:0:0:53:0] Put# [1:19:9786:0:0:71:0] Put# [1:19:9787:0:0:63:0] Put# [1:19:9788:0:0:88:0] Put# [1:19:9789:0:0:50:0] Put# [1:19:9790:0:0:87:0] Put# [1:19:9791:0:0:81:0] Put# [1:19:9792:0:0:74:0] Put# [1:19:9793:0:0:91:0] Put# [1:19:9794:0:0:2:0] Put# [1:19:9795:0:0:42:0] Put# [1:19:9796:0:0:13:0] Put# [1:19:9797:0:0:63:0] Put# [1:19:9798:0:0:24:0] Put# [1:19:9799:0:0:62:0] Put# [1:19:9800:0:0:44:0] Put# [1:19:9801:0:0:28:0] Put# [1:19:9802:0:0:59:0] Put# [1:19:9803:0:0:54:0] Put# [1:19:9804:0:0:65:0] Put# [1:19:9805:0:0:42:0] Put# [1:19:9806:0:0:14:0] Put# [1:19:9807:0:0:39:0] Put# [1:19:9808:0:0:5:0] Put# [1:19:9809:0:0:90:0] Put# [1:19:9810:0:0:35:0] Put# [1:19:9811:0:0:11:0] 
Put# [1:19:9812:0:0:70:0] Put# [1:19:9813:0:0:76:0] Put# [1:19:9814:0:0:27:0] Put# [1:19:9815:0:0:67:0] Put# [1:19:9816:0:0:9:0] Put# [1:19:9817:0:0:10:0] Put# [1:19:9818:0:0:69:0] Put# [1:19:9819:0:0:98:0] Put# [1:19:9820:0:0:84:0] Put# [1:19:9821:0:0:32:0] Put# [1:19:9822:0:0:30:0] Put# [1:19:9823:0:0:13:0] Put# [1:19:9824:0:0:16:0] Put# [1:19:9825:0:0:78:0] Put# [1:19:9826:0:0:62:0] Put# [1:19:9827:0:0:41:0] Put# [1:19:9828:0:0:6:0] Put# [1:19:9829:0:0:16:0] Put# [1:19:9830:0:0:6:0] Put# [1:19:9831:0:0:38:0] Put# [1:19:9832:0:0:46:0] Put# [1:19:9833:0:0:78:0] Put# [1:19:9834:0:0:33:0] Put# [1:19:9835:0:0:83:0] Put# [1:19:9836:0:0:90:0] Put# [1:19:9837:0:0:82:0] Put# [1:19:9838:0:0:4:0] Put# [1:19:9839:0:0:9:0] Put# [1:19:9840:0:0:84:0] Put# [1:19:9841:0:0:39:0] Put# [1:19:9842:0:0:2:0] Put# [1:19:9843:0:0:57:0] Put# [1:19:9844:0:0:91:0] Put# [1:19:9845:0:0:12:0] Put# [1:19:9846:0:0:73:0] Put# [1:19:9847:0:0:28:0] Put# [1:19:9848:0:0:6:0] Put# [1:19:9849:0:0:88:0] Put# [1:19:9850:0:0:40:0] Put# [1:19:9851:0:0:31:0] Put# [1:19:9852:0:0:72:0] Put# [1:19:9853:0:0:4:0] Put# [1:19:9854:0:0:69:0] Put# [1:19:9855:0:0:19:0] Put# [1:19:9856:0:0:100:0] Put# [1:19:9857:0:0:96:0] Put# [1:19:9858:0:0:29:0] Put# [1:19:9859:0:0:56:0] Put# [1:19:9860:0:0:15:0] Put# [1:19:9861:0:0:86:0] Put# [1:19:9862:0:0:23:0] Put# [1:19:9863:0:0:68:0] Put# [1:19:9864:0:0:72:0] Put# [1:19:9865:0:0:67:0] Put# [1:19:9866:0:0:54:0] Put# [1:19:9867:0:0:9:0] Put# [1:19:9868:0:0:25:0] Put# [1:19:9869:0:0:41:0] Put# [1:19:9870:0:0:54:0] Put# [1:19:9871:0:0:99:0] Put# [1:19:9872:0:0:94:0] Put# [1:19:9873:0:0:36:0] Put# [1:19:9874:0:0:91:0] Put# [1:19:9875:0:0:96:0] Put# [1:19:9876:0:0:65:0] Put# [1:19:9877:0:0:70:0] Put# [1:19:9878:0:0:88:0] Put# [1:19:9879:0:0:71:0] Put# [1:19:9880:0:0:57:0] Put# [1:19:9881:0:0:76:0] Put# [1:19:9882:0:0:87:0] Put# [1:19:9883:0:0:77:0] Put# [1:19:9884:0:0:32:0] Put# [1:19:9885:0:0:90:0] Put# [1:19:9886:0:0:37:0] Put# [1:19:9887:0:0:35:0] Put# [1:19:9888:0:0:56:0] Put# [1:19:9889:0:0:73:0] Put# [1:19:9890:0:0:5:0] Put# [1:19:9891:0:0:100:0] Put# [1:19:9892:0:0:99:0] Put# [1:19:9893:0:0:64:0] Put# [1:19:9894:0:0:44:0] Put# [1:19:9895:0:0:54:0] Put# [1:19:9896:0:0:16:0] Put# [1:19:9897:0:0:21:0] Put# [1:19:9898:0:0:22:0] Put# [1:19:9899:0:0:69:0] Put# [1:19:9900:0:0:33:0] Put# [1:19:9901:0:0:98:0] Put# [1:19:9902:0:0:52:0] Put# [1:19:9903:0:0:93:0] Put# [1:19:9904:0:0:26:0] Put# [1:19:9905:0:0:26:0] Put# [1:19:9906:0:0:90:0] Put# [1:19:9907:0:0:83:0] Put# [1:19:9908:0:0:66:0] Put# [1:19:9909:0:0:90:0] Put# [1:19:9910:0:0:92:0] Put# [1:19:9911:0:0:68:0] Put# [1:19:9912:0:0:4:0] Put# [1:19:9913:0:0:87:0] Put# [1:19:9914:0:0:97:0] Put# [1:19:9915:0:0:7:0] Put# [1:19:9916:0:0:23:0] Put# [1:19:9917:0:0:3:0] Put# [1:19:9918:0:0:69:0] Put# [1:19:9919:0:0:81:0] Put# [1:19:9920:0:0:45:0] Put# [1:19:9921:0:0:82:0] Put# [1:19:9922:0:0:81:0] Put# [1:19:9923:0:0:57:0] Put# [1:19:9924:0:0:4:0] Put# [1:19:9925:0:0:79:0] Put# [1:19:9926:0:0:70:0] Put# [1:19:9927:0:0:45:0] Put# [1:19:9928:0:0:83:0] Put# [1:19:9929:0:0:59:0] Put# [1:19:9930:0:0:97:0] Put# [1:19:9931:0:0:49:0] Put# [1:19:9932:0:0:31:0] Put# [1:19:9933:0:0:75:0] Put# [1:19:9934:0:0:43:0] Put# [1:19:9935:0:0:44:0] Put# [1:19:9936:0:0:15:0] Put# [1:19:9937:0:0:53:0] Put# [1:19:9938:0:0:6:0] Put# [1:19:9939:0:0:15:0] Put# [1:19:9940:0:0:5:0] Put# [1:19:9941:0:0:1:0] Put# [1:19:9942:0:0:80:0] Put# [1:19:9943:0:0:43:0] Put# [1:19:9944:0:0:37:0] Put# [1:19:9945:0:0:100:0] Put# [1:19:9946:0:0:26:0] Put# [1:19:9947:0:0:78:0] Put# [1:19:9948:0:0:84:0] Put# 
[1:19:9949:0:0:78:0] Put# [1:19:9950:0:0:46:0] Put# [1:19:9951:0:0:3:0] Put# [1:19:9952:0:0:81:0] Put# [1:19:9953:0:0:32:0] Put# [1:19:9954:0:0:34:0] Put# [1:19:9955:0:0:2:0] Put# [1:19:9956:0:0:91:0] Put# [1:19:9957:0:0:31:0] Put# [1:19:9958:0:0:56:0] Put# [1:19:9959:0:0:69:0] Put# [1:19:9960:0:0:26:0] Put# [1:19:9961:0:0:21:0] Put# [1:19:9962:0:0:68:0] Put# [1:19:9963:0:0:97:0] Put# [1:19:9964:0:0:92:0] Put# [1:19:9965:0:0:39:0] Put# [1:19:9966:0:0:34:0] Put# [1:19:9967:0:0:52:0] Put# [1:19:9968:0:0:6:0] Put# [1:19:9969:0:0:36:0] Put# [1:19:9970:0:0:2:0] Put# [1:19:9971:0:0:53:0] Put# [1:19:9972:0:0:49:0] Put# [1:19:9973:0:0:71:0] Put# [1:19:9974:0:0:56:0] Put# [1:19:9975:0:0:100:0] Put# [1:19:9976:0:0:16:0] Put# [1:19:9977:0:0:60:0] Put# [1:19:9978:0:0:26:0] Put# [1:19:9979:0:0:94:0] Put# [1:19:9980:0:0:15:0] Put# [1:19:9981:0:0:77:0] Put# [1:19:9982:0:0:18:0] Put# [1:19:9983:0:0:88:0] Put# [1:19:9984:0:0:53:0] Put# [1:19:9985:0:0:54:0] Put# [1:19:9986:0:0:45:0] Put# [1:19:9987:0:0:60:0] Put# [1:19:9988:0:0:83:0] Put# [1:19:9989:0:0:30:0] Put# [1:19:9990:0:0:76:0] Put# [1:19:9991:0:0:31:0] Put# [1:19:9992:0:0:2:0] Put# [1:19:9993:0:0:48:0] Put# [1:19:9994:0:0:20:0] Put# [1:19:9995:0:0:85:0] Put# [1:19:9996:0:0:84:0] Put# [1:19:9997:0:0:46:0] Put# [1:19:9998:0:0:78:0] Put# [1:19:9999:0:0:19:0] Put# [1:19:10000:0:0:3:0] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-11-03T14:04:02.697082Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.697109Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.700607Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.700674Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.732339Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.732484Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=14496765064986679960, session=0, seqNo=0) 2025-11-03T14:04:02.732516Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:02.743339Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=14496765064986679960, session=1) 2025-11-03T14:04:02.743414Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=6924684207799438845, session=0, seqNo=0) 2025-11-03T14:04:02.743444Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:02.754120Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=6924684207799438845, session=2) 2025-11-03T14:04:02.754200Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=111, session=1, semaphore="Lock1" count=1) 2025-11-03T14:04:02.754234Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:02.754246Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] 
Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:02.764988Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:02.765055Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:02.765121Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:02.765145Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-11-03T14:04:02.775915Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2025-11-03T14:04:02.775938Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=333) 2025-11-03T14:04:02.776061Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=972200695783489427, name="Lock1") 2025-11-03T14:04:02.776077Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=972200695783489427) 2025-11-03T14:04:02.965822Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.965850Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.968625Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.968711Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.989778Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.989905Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=1681029732582201012, session=0, seqNo=0) 2025-11-03T14:04:02.989935Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.000980Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=1681029732582201012, session=1) 2025-11-03T14:04:03.001080Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=7212210693476300106, session=0, seqNo=0) 2025-11-03T14:04:03.001117Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:03.011853Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=7212210693476300106, session=2) 2025-11-03T14:04:03.011930Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:03.011962Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:03.011975Z 
node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:03.022813Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=111) 2025-11-03T14:04:03.022899Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:03.022982Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:03.033933Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=222) 2025-11-03T14:04:03.033963Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=333) 2025-11-03T14:04:03.034064Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2176], cookie=14608787759224418411, name="Lock1") 2025-11-03T14:04:03.034079Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2176], cookie=14608787759224418411) 2025-11-03T14:04:03.034119Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:157:2179], cookie=13602983103965691309, name="Lock1") 2025-11-03T14:04:03.034124Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:157:2179], cookie=13602983103965691309) 2025-11-03T14:04:03.231102Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.231134Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.234437Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.234497Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.255749Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.255878Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=8766140464170093778, session=0, seqNo=0) 2025-11-03T14:04:03.255910Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.266870Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=8766140464170093778, session=1) 2025-11-03T14:04:03.266985Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=9721730381035542781, session=0, seqNo=0) 2025-11-03T14:04:03.267026Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:03.277877Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=9721730381035542781, session=2) 2025-11-03T14:04:03.278067Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: 
[72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:03.278111Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:03.278129Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:03.288993Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=111) 2025-11-03T14:04:03.289089Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:03.289167Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:138:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:03.289182Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-11-03T14:04:03.300062Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=222) 2025-11-03T14:04:03.300095Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:138:2162], cookie=333) 2025-11-03T14:04:03.300223Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:157:2179], cookie=14635727032065225173, name="Lock1") 2025-11-03T14:04:03.300244Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:157:2179], cookie=14635727032065225173) 2025-11-03T14:04:03.300307Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:160:2182], cookie=15382007205406048545, name="Lock1") 2025-11-03T14:04:03.300314Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:160:2182], cookie=15382007205406048545) 2025-11-03T14:04:03.303468Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.303502Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.303568Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.303650Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.336250Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.336303Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:03.336435Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:199:2212], cookie=14795740781827187293, name="Lock1") 2025-11-03T14:04:03.336456Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:199:2212], cookie=14795740781827187293) 2025-11-03T14:04:03.336563Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] 
TTxSemaphoreDescribe::Execute (sender=[3:207:2219], cookie=11557780355949521736, name="Lock1") 2025-11-03T14:04:03.336572Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:207:2219], cookie=11557780355949521736) 2025-11-03T14:04:03.493397Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.493434Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.498031Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.498070Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.519584Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.519705Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=5309926776653170861, session=0, seqNo=0) 2025-11-03T14:04:03.519734Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.540540Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=5309926776653170861, session=1) 2025-11-03T14:04:03.540611Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=10945962826673403828, session=0, seqNo=0) 2025-11-03T14:04:03.540635Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:03.551210Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=10945962826673403828, session=2) 2025-11-03T14:04:03.551280Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:03.551309Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:03.551322Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:03.562065Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=111) 2025-11-03T14:04:03.562117Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:03.562163Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2162], cookie=333, name="Lock1") 2025-11-03T14:04:03.562171Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-11-03T14:04:03.572669Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2162], cookie=222) 2025-11-03T14:04:03.572687Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2162], cookie=333) 2025-11-03T14:04:03.754099Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: 
OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.754126Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.757303Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.757367Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.788591Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.789493Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=13405336733337047219, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-11-03T14:04:03.789535Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:03.800193Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=13405336733337047219) 2025-11-03T14:04:03.800307Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=15968276199387341312, path="/Root/Res", config={ }) 2025-11-03T14:04:03.800357Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-11-03T14:04:03.811131Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=15968276199387341312) 2025-11-03T14:04:03.811466Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 13952724641068096967. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:03.811481Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=13952724641068096967) 2025-11-03T14:04:03.811543Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:152:2174]. Cookie: 9178424282163832829. Data: { } 2025-11-03T14:04:03.811549Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:152:2174], cookie=9178424282163832829) 2025-11-03T14:04:03.852240Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.903142Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.933597Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:03.974354Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:04.015090Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> KqpFlipJoin::RightSemi_1 >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestSessionStealing >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> KqpJoinOrder::Chain65Nodes >> test.py::test[ql_filter-integer_escaping--Results] [GOOD] >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-11-03T14:04:03.250655Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.250696Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.255175Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.255263Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.287551Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.287688Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=6090010450939013444, session=0, seqNo=0) 2025-11-03T14:04:03.287717Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.298486Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=6090010450939013444, session=1) 2025-11-03T14:04:03.298573Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=3388497991491092034, session=0, seqNo=0) 2025-11-03T14:04:03.298604Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:03.309604Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=3388497991491092034, session=2) 2025-11-03T14:04:03.514826Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.514863Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.517949Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.518116Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.539421Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.539578Z node 2 
:KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=12268706032383053172, session=1, seqNo=0) 2025-11-03T14:04:03.550190Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=12268706032383053172, session=1) 2025-11-03T14:04:03.779468Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.779495Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.782658Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.782719Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.803875Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.804073Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=14852205265879437582, session=0, seqNo=0) 2025-11-03T14:04:03.804105Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.814907Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=14852205265879437582, session=1) 2025-11-03T14:04:04.077049Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.077078Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.080412Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.080445Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.101779Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.101913Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:137:2162], cookie=15515761025305672264, path="") 2025-11-03T14:04:04.123118Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:137:2162], cookie=15515761025305672264, status=SUCCESS) 2025-11-03T14:04:04.123322Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:147:2169], cookie=5559917829154087023, session=0, seqNo=0) 2025-11-03T14:04:04.123365Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:04.134108Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:147:2169], cookie=5559917829154087023, session=1) 2025-11-03T14:04:04.134287Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:148:2170], cookie=111, session=0, seqNo=0) 2025-11-03T14:04:04.134320Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:04.134353Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:148:2170], cookie=222, seqNo=0 2025-11-03T14:04:04.145108Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:148:2170], cookie=111, 
session=2) 2025-11-03T14:04:04.406360Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.406391Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.410023Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.410087Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.441679Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.441791Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:137:2162], cookie=16797646747989767844, path="") 2025-11-03T14:04:04.452590Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:137:2162], cookie=16797646747989767844, status=SUCCESS) 2025-11-03T14:04:04.452796Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:147:2169], cookie=9470971921566914353, session=0, seqNo=0) 2025-11-03T14:04:04.452831Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:04.463673Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:147:2169], cookie=9470971921566914353, session=1) 2025-11-03T14:04:04.463866Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:147:2169], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:04.463924Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:04.463945Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:04.464018Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:148:2170], cookie=111, session=0, seqNo=0) 2025-11-03T14:04:04.464035Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:04.464057Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:148:2170], cookie=222, session=1, seqNo=0) 2025-11-03T14:04:04.474766Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:147:2169], cookie=123) 2025-11-03T14:04:04.474793Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:148:2170], cookie=111, session=2) 2025-11-03T14:04:04.474800Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:148:2170], cookie=222, session=1) >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestSessionStealingAnyKey [GOOD] |56.2%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[insert-drop_sortness-calc-Results] [GOOD] |56.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |56.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-11-03T14:04:02.818222Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.818251Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.821457Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.821532Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.852935Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.853828Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2162], cookie=13671286700567677375, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-11-03T14:04:02.853880Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:02.864631Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2162], cookie=13671286700567677375) 2025-11-03T14:04:02.864796Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:147:2169], cookie=12823824508488498614, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-11-03T14:04:02.864856Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-11-03T14:04:02.875703Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:147:2169], cookie=12823824508488498614) 2025-11-03T14:04:02.875870Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:152:2174], cookie=13356649323792412229, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:02.875922Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-11-03T14:04:02.886784Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:152:2174], cookie=13356649323792412229) 2025-11-03T14:04:02.887011Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:157:2179], cookie=18212061514017198669, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:02.887072Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-11-03T14:04:02.899611Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:157:2179], cookie=18212061514017198669) 2025-11-03T14:04:02.899813Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:162:2184], cookie=12176935767472988393, path="/Root/Folder/Q2", config={ 
MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:02.899873Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-11-03T14:04:02.911298Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:162:2184], cookie=12176935767472988393) 2025-11-03T14:04:02.911476Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:167:2189], cookie=4913994448000281442, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:02.911533Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-11-03T14:04:02.922580Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:167:2189], cookie=4913994448000281442) 2025-11-03T14:04:02.922822Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:172:2194], cookie=12928217193112268755, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-11-03T14:04:02.922902Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2025-11-03T14:04:02.933870Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:172:2194], cookie=12928217193112268755) 2025-11-03T14:04:02.934080Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:177:2199], cookie=14332693595159739449, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:02.934162Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-11-03T14:04:02.945032Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:177:2199], cookie=14332693595159739449) 2025-11-03T14:04:02.945195Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:182:2204], cookie=18407230065391079514, ids=[100], paths=[], recursive=0) 2025-11-03T14:04:02.945218Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2204], cookie=18407230065391079514) 2025-11-03T14:04:02.945296Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2207], cookie=17789444081445555383, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-11-03T14:04:02.945308Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2207], cookie=17789444081445555383) 2025-11-03T14:04:02.945363Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2210], cookie=13146481619577242860, ids=[], paths=[/Root, ], recursive=0) 2025-11-03T14:04:02.945374Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2210], cookie=13146481619577242860) 2025-11-03T14:04:02.945431Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: 
[72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2213], cookie=12096980782790990661, ids=[1, 1], paths=[], recursive=0) 2025-11-03T14:04:02.945439Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2213], cookie=12096980782790990661) 2025-11-03T14:04:02.945529Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2216], cookie=6624166784999873782, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-11-03T14:04:02.945537Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2216], cookie=6624166784999873782) 2025-11-03T14:04:02.945592Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2219], cookie=17451890560305203005, ids=[], paths=[], recursive=1) 2025-11-03T14:04:02.945602Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2219], cookie=17451890560305203005) 2025-11-03T14:04:02.945683Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2222], cookie=16539574885345710841, ids=[], paths=[], recursive=0) 2025-11-03T14:04:02.945690Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2222], cookie=16539574885345710841) 2025-11-03T14:04:02.945743Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2225], cookie=15167613883243769754, ids=[3, 2], paths=[], recursive=1) 2025-11-03T14:04:02.945750Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2225], cookie=15167613883243769754) 2025-11-03T14:04:02.945807Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:206:2228], cookie=7271893982750830390, ids=[3, 2], paths=[], recursive=0) 2025-11-03T14:04:02.945813Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:206:2228], cookie=7271893982750830390) 2025-11-03T14:04:02.945871Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:209:2231], cookie=9500744610289582652, ids=[], paths=[Root2/], recursive=1) 2025-11-03T14:04:02.945879Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:209:2231], cookie=9500744610289582652) 2025-11-03T14:04:02.945938Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:212:2234], cookie=11890757878079857772, ids=[], paths=[Root2/], recursive=0) 2025-11-03T14:04:02.945944Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:212:2234], cookie=11890757878079857772) 2025-11-03T14:04:02.949114Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.949144Z node 1 :KESUS_TABLET DEBUG: 
tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.949203Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.949342Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.991798Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.991904Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:251:2264], cookie=10627582953152761448, ids=[100], paths=[], recursive=0) 2025-11-03T14:04:02.991922Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:251:2264], cookie=10627582953152761448) 2025-11-03T14:04:02.992027Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:257:2269], cookie=12535518204934730849, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-11-03T14:04:02.992035Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:257:2269], cookie=12535518204934730849) 2025-11-03T14:04:02.992085Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:260:2272], cookie=16709783936067578707, ids=[], paths=[/Root, ], recursive=0) 2025-11-03T14:04:02.992094Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:260:2272], cookie=16709783936067578707) 2025-11-03T14 ... KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-11-03T14:04:04.155284Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:157:2179], cookie=10623824569983094168) 2025-11-03T14:04:04.155434Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:162:2184], cookie=9752470703138935518, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.155454Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:162:2184], cookie=9752470703138935518) 2025-11-03T14:04:04.155570Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:168:2190], cookie=14420175345442146453, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.155579Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:168:2190], cookie=14420175345442146453) 2025-11-03T14:04:04.155682Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:174:2196], cookie=14041550509118373912, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.155689Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:174:2196], cookie=14041550509118373912) 2025-11-03T14:04:04.155751Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:177:2199], cookie=18083382762596918797, id=0, 
path="/Root/Folder/NonexistingRes") 2025-11-03T14:04:04.155763Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:177:2199], cookie=18083382762596918797) 2025-11-03T14:04:04.155813Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:180:2202], cookie=2780954442915084716, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.155820Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:180:2202], cookie=2780954442915084716) 2025-11-03T14:04:04.155872Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:183:2205], cookie=86434049062783660, id=100, path="") 2025-11-03T14:04:04.155878Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:183:2205], cookie=86434049062783660) 2025-11-03T14:04:04.155930Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:186:2208], cookie=9122331735681344278, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.155935Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:186:2208], cookie=9122331735681344278) 2025-11-03T14:04:04.155991Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:189:2211], cookie=15324225962361714141, id=3, path="") 2025-11-03T14:04:04.155998Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:189:2211], cookie=15324225962361714141) 2025-11-03T14:04:04.156065Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:192:2214], cookie=17460896577048763652, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.156071Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:192:2214], cookie=17460896577048763652) 2025-11-03T14:04:04.156131Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:195:2217], cookie=10387877802082155610, id=0, path="/Root/Folder/Q1") 2025-11-03T14:04:04.156163Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-11-03T14:04:04.167048Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:195:2217], cookie=10387877802082155610) 2025-11-03T14:04:04.167221Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:200:2222], cookie=1463277873732759250, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.167245Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:200:2222], cookie=1463277873732759250) 2025-11-03T14:04:04.170098Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.170126Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 
2025-11-03T14:04:04.170181Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.170249Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.212998Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.213092Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:239:2252], cookie=1003225962337523863, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.213113Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:239:2252], cookie=1003225962337523863) 2025-11-03T14:04:04.213220Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:245:2257], cookie=16190439105147802233, id=3, path="") 2025-11-03T14:04:04.213260Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-11-03T14:04:04.224223Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:245:2257], cookie=16190439105147802233) 2025-11-03T14:04:04.224431Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:250:2262], cookie=7780760171920326962, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.224457Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:250:2262], cookie=7780760171920326962) 2025-11-03T14:04:04.227410Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.227443Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.227508Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.227661Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.270042Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.270155Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:289:2292], cookie=1781161289097990015, ids=[], paths=[], recursive=1) 2025-11-03T14:04:04.270175Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:289:2292], cookie=1781161289097990015) 2025-11-03T14:04:04.578239Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.578266Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.581101Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.581148Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.612775Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.612929Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=355714780464126579, path="/Q1", config={ 
MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:04.612978Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2025-11-03T14:04:04.623763Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=355714780464126579) 2025-11-03T14:04:04.623940Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=921554030311737418, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-11-03T14:04:04.623985Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2025-11-03T14:04:04.634646Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=921554030311737418) 2025-11-03T14:04:04.634961Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 5516934787110086945. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:04.634971Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=5516934787110086945) 2025-11-03T14:04:04.635057Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 10183663729620398731. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 } 2025-11-03T14:04:04.635063Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=10183663729620398731) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: 2025-11-03T14:03:35.467129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:35.506144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:35.508673Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.508783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.508827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001d6a/r3tmp/tmpsoG8DU/pdisk_1.dat 2025-11-03T14:03:35.585228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:35.585277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:35.599011Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:35.600126Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178615084038 != 1762178615084042 2025-11-03T14:03:35.631629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:35.679832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:35.713096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:35.796322Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:35.796353Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:35.796381Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:35.814876Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:35.814931Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:35.815206Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:35.815226Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:35.815296Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:35.815339Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:35.815356Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:35.815445Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:35.815975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:35.816285Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:35.816299Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:35.831915Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:35.832222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:35.832300Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:35.832362Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:35.844137Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:35.844354Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:35.844389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:35.844578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:35.844590Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:35.844598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:35.844676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:35.844712Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:35.844729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:35.844825Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:35.857127Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:35.857223Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:35.857250Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:35.857257Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:35.857263Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:35.857269Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:35.857344Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:35.857353Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:35.857465Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:35.857490Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:35.857598Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:35.857609Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:35.857618Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:35.857624Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:35.857630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:35.857636Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:35.857642Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:35.857659Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:35.857666Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:35.857674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:35.857687Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:35.857693Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:35.857719Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:35.857790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:35.857805Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:35.857826Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:35.857850Z node ... 1 } 2025-11-03T14:04:04.765023Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-11-03T14:04:04.765027Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit FinishProposeWrite 2025-11-03T14:04:04.765031Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:04:04.765036Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:04:04.765054Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:5] at 72075186224037888 is Executed 2025-11-03T14:04:04.765058Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:04:04.765063Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:5] at 72075186224037888 has finished 2025-11-03T14:04:04.765084Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-11-03T14:04:04.765090Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2025-11-03T14:04:04.765096Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2025-11-03T14:04:04.765108Z node 16 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2025-11-03T14:04:04.765124Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:04.765197Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:913: SelfId: [16:918:2682], Table: `/Root/table` ([72057594046644480:2:1]), SessionActorId: [16:853:2682]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:918:2682].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2025-11-03T14:04:04.765219Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:3727: SelfId: [16:911:2682], SessionActorId: [16:853:2682], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:853:2682]. 2025-11-03T14:04:04.765295Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2193: SessionId: ydb://session/3?node_id=16&id=MzU3MjIxZGYtM2E0YTc1M2ItM2VlY2M5ODEtY2ZjMzdmODk=, ActorId: [16:853:2682], ActorState: ExecuteState, TraceId: 01k950ger5aq70jajen035jppx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:912:2682] from: [16:911:2682] 2025-11-03T14:04:04.765328Z node 16 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [16:912:2682] TxId: 281474976715663. Ctx: { TraceId: 01k950ger5aq70jajen035jppx, Database: , SessionId: ydb://session/3?node_id=16&id=MzU3MjIxZGYtM2E0YTc1M2ItM2VlY2M5ODEtY2ZjMzdmODk=, PoolId: default, DatabaseId: /Root}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } 2025-11-03T14:04:04.765420Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 278003712, Sender [16:911:2682], Recipient [16:664:2573]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-11-03T14:04:04.765427Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-11-03T14:04:04.765458Z node 16 :KQP_SESSION WARN: kqp_session_actor.cpp:2987: SessionId: ydb://session/3?node_id=16&id=MzU3MjIxZGYtM2E0YTc1M2ItM2VlY2M5ODEtY2ZjMzdmODk=, ActorId: [16:853:2682], ActorState: ExecuteState, TraceId: 01k950ger5aq70jajen035jppx, Create QueryResponse for error on request, msg: 2025-11-03T14:04:04.765706Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435074, Sender [16:664:2573], Recipient [16:664:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-11-03T14:04:04.765715Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3184: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-11-03T14:04:04.765726Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2025-11-03T14:04:04.765745Z node 16 :TX_DATASHARD TRACE: datashard_write_operation.cpp:68: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 } Op: Rollback } 2025-11-03T14:04:04.765756Z node 16 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-11-03T14:04:04.765766Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2025-11-03T14:04:04.765772Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-11-03T14:04:04.765775Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2025-11-03T14:04:04.765778Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:04:04.765780Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:04:04.765787Z node 16 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v300/0 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v301/0 ImmediateWriteEdgeReplied# v301/0 2025-11-03T14:04:04.765809Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2025-11-03T14:04:04.765814Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-11-03T14:04:04.765818Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:04:04.765823Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] 
at 72075186224037888 to execution unit BlockFailPoint 2025-11-03T14:04:04.765827Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2025-11-03T14:04:04.765833Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-11-03T14:04:04.765837Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2025-11-03T14:04:04.765841Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2025-11-03T14:04:04.765845Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2025-11-03T14:04:04.765851Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:260: Executing write operation for [0:6] at 72075186224037888 2025-11-03T14:04:04.765867Z node 16 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 2025-11-03T14:04:04.765872Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:461: Skip empty write operation for [0:6] at 72075186224037888 2025-11-03T14:04:04.765886Z node 16 :TX_DATASHARD TRACE: execute_write_unit.cpp:47: add locks to result: 0 2025-11-03T14:04:04.765896Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2025-11-03T14:04:04.765899Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2025-11-03T14:04:04.765901Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2025-11-03T14:04:04.765904Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-11-03T14:04:04.765910Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is DelayComplete 2025-11-03T14:04:04.765912Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2025-11-03T14:04:04.765915Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:04:04.765917Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:04:04.765925Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:6] at 72075186224037888 is Executed 2025-11-03T14:04:04.765927Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:04:04.765930Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:6] at 72075186224037888 has finished 2025-11-03T14:04:04.765938Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-11-03T14:04:04.765941Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2025-11-03T14:04:04.765945Z node 16 :TX_DATASHARD TRACE: 
finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-11-03T14:04:04.765954Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:04.766122Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [16:69:2116], Recipient [16:664:2573]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-11-03T14:04:04.766613Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [16:925:2732], Recipient [16:664:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:04.766641Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:04.766647Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [16:924:2731], serverId# [16:925:2732], sessionId# [0:0:0] 2025-11-03T14:04:04.766664Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553224, Sender [16:590:2518], Recipient [16:664:2573]: NKikimr::TEvDataShard::TEvGetOpenTxs >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-11-03T14:04:03.928462Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.928485Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.931454Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.931499Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.962777Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.962900Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=1959543842344183985, session=0, seqNo=0) 2025-11-03T14:04:03.962925Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.973563Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=1959543842344183985, session=1) 2025-11-03T14:04:03.973782Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:137:2162], cookie=7617013352753326388, session=2) 2025-11-03T14:04:03.973796Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:137:2162], cookie=7617013352753326388) 2025-11-03T14:04:03.973838Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from 
sender=[1:137:2162], cookie=14185424681797997451 2025-11-03T14:04:03.973894Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=875951697305754345, session=1, seqNo=0) 2025-11-03T14:04:03.984514Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=875951697305754345, session=1) 2025-11-03T14:04:03.984577Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:03.984605Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:03.984616Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:03.984644Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:137:2162], cookie=1740672466572665906, session=1) 2025-11-03T14:04:03.994763Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-11-03T14:04:03.994779Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:03.994785Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-11-03T14:04:04.005404Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:04.005430Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:137:2162], cookie=1740672466572665906) 2025-11-03T14:04:04.005436Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-11-03T14:04:04.204715Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.204755Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.209465Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.209641Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.231495Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.231625Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:138:2162], cookie=2614520963249847777, path="") 2025-11-03T14:04:04.242773Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:138:2162], cookie=2614520963249847777, status=SUCCESS) 2025-11-03T14:04:04.242931Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:147:2169], cookie=111, session=0, seqNo=0) 2025-11-03T14:04:04.242965Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:04.243006Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:147:2169], cookie=15007791579149706613, session=1) 2025-11-03T14:04:04.253170Z node 2 :KESUS_TABLET DEBUG: 
tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-11-03T14:04:04.253197Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:04.264243Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:147:2169], cookie=111, session=1) 2025-11-03T14:04:04.264270Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:147:2169], cookie=15007791579149706613) 2025-11-03T14:04:04.264277Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-11-03T14:04:04.469706Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.469728Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.473543Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.473595Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.494588Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.494709Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=14566977681667017634, session=0, seqNo=0) 2025-11-03T14:04:04.494746Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:04.505514Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=14566977681667017634, session=1) 2025-11-03T14:04:04.505676Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:138:2162], cookie=10198362018725699031, session=1) 2025-11-03T14:04:04.505700Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:04.516833Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:138:2162], cookie=10198362018725699031) 2025-11-03T14:04:04.517035Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:155:2177], cookie=406073963029228223) 2025-11-03T14:04:04.517055Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:155:2177], cookie=406073963029228223) 2025-11-03T14:04:04.517147Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:158:2180], cookie=5816126122016565054, session=0, seqNo=0) 2025-11-03T14:04:04.517182Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:04.528089Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:158:2180], cookie=5816126122016565054, session=2) 2025-11-03T14:04:04.528275Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:138:2162], cookie=2407100398691890349, session=2) 2025-11-03T14:04:04.528298Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2025-11-03T14:04:04.539046Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: 
[72057594037927937] TTxSessionDestroy::Complete (sender=[3:138:2162], cookie=2407100398691890349) 2025-11-03T14:04:04.767755Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.767789Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.772493Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.772540Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.794244Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.794458Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=12345, session=0, seqNo=0) 2025-11-03T14:04:04.794494Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:04.815689Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=12345, session=1) 2025-11-03T14:04:04.815876Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2167], cookie=23456, session=1, seqNo=0) 2025-11-03T14:04:04.826852Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2167], cookie=23456, session=1) 2025-11-03T14:04:05.036352Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:05.036382Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:05.039650Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:05.039717Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:05.071418Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:05.071624Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=12345, session=0, seqNo=0) 2025-11-03T14:04:05.071665Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:05.082337Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=12345, session=1) 2025-11-03T14:04:05.082457Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:145:2167], cookie=23456, session=1, seqNo=0) 2025-11-03T14:04:05.093292Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:145:2167], cookie=23456, session=1) >> KqpIndexLookupJoin::RightSemi+StreamLookup >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> KqpJoin::JoinConvert |56.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> 
BasicStatistics::TwoNodes |56.2%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> test.py::test[join-premap_common_left_cross--ForceBlocks] [GOOD] >> test.py::test[join-premap_common_left_cross--Results] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightOnly_3 >> test.py::test[aggregate-group_by_rollup_grouping_hum--Results] [GOOD] >> test.py::test[aggregate-group_by_session--Results] |56.2%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2025-11-03T14:04:03.916024Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.916052Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.919852Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.919917Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.951333Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.185497Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.185534Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.190102Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.190254Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.211483Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.465336Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.465366Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.469766Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.469834Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.491129Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.771819Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.771859Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.776505Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.776564Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.818336Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.818698Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: 
[72057594037927937] NodeDisconnected NodeId# 5 2025-11-03T14:04:04.818952Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:195:2163]) 2025-11-03T14:04:05.234108Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:05.234145Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:05.239164Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:05.239234Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute ... waiting for register request 2025-11-03T14:04:05.271229Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 2461358261836449738 ... waiting for register request (done) ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-11-03T14:04:05.271466Z node 6 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 7 2025-11-03T14:04:05.271642Z node 6 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([6:196:2163]) >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> test.py::test[aggregate-group_by_expr_and_having--Results] [GOOD] >> test.py::test[aggregate-group_by_expr_with_join--Results] >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] |56.2%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |56.3%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: 2025-11-03T14:03:35.955159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:35.995631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:35.997942Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.998025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.998061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001d5f/r3tmp/tmpM2VD3H/pdisk_1.dat 2025-11-03T14:03:36.063558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:36.063590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:36.070714Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:36.071271Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178615480860 != 1762178615480864 2025-11-03T14:03:36.102309Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:36.148804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:36.186215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:36.269233Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:36.269255Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:36.269276Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:36.283875Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:36.283921Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:36.284145Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:36.284163Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:36.284233Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:36.284269Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:36.284284Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:36.284362Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:36.284774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:36.285023Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:36.285036Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:36.298408Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:36.298600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:36.298669Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:36.298709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:36.305797Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:36.305977Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:36.305999Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:36.306188Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:36.306198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:36.306206Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:36.306252Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:36.306268Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:36.306277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:36.306343Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:36.314381Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:36.314443Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:36.314462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:36.314467Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:36.314470Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:36.314474Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:36.314534Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:36.314541Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:36.314613Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:36.314656Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:36.314739Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:36.314748Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:36.314756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:36.314762Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:36.314769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:36.314775Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:36.314779Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:36.314790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:36.314794Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:36.314798Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:36.314807Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:36.314810Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:36.314825Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:36.314870Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:36.314880Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:36.314892Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:36.314907Z node ... essProposeKqpTransaction 2025-11-03T14:04:05.997647Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715671. Ctx: { TraceId: 01k950gfxw6a26pcq16zxhn9y5, Database: , SessionId: ydb://session/3?node_id=13&id=NTk2NmYyNzktNjQzMWVkMDAtZGYxMGNlNDMtM2E0NDYyYWI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-11-03T14:04:05.998168Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [13:1649:3347], Recipient [13:798:2653]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-11-03T14:04:05.998210Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2561: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-11-03T14:04:05.998219Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-11-03T14:04:05.998225Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2664: 72075186224037889 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-11-03T14:04:05.998234Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-11-03T14:04:05.998253Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:04:05.998257Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-11-03T14:04:05.998262Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-11-03T14:04:05.998265Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-11-03T14:04:05.998277Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2025-11-03T14:04:05.998282Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:04:05.998285Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 
2025-11-03T14:04:05.998289Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-11-03T14:04:05.998292Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-11-03T14:04:05.998304Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1657: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-11-03T14:04:05.998351Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2272: 72075186224037889 Complete read# {[13:1649:3347], 0} after executionsCount# 1 2025-11-03T14:04:05.998357Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2246: 72075186224037889 read iterator# {[13:1649:3347], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-11-03T14:04:05.998374Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2297: 72075186224037889 read iterator# {[13:1649:3347], 0} finished in read 2025-11-03T14:04:05.998384Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:04:05.998388Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-11-03T14:04:05.998393Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-11-03T14:04:05.998398Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-11-03T14:04:05.998411Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037889 is Executed 2025-11-03T14:04:05.998415Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-11-03T14:04:05.998420Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037889 has finished 2025-11-03T14:04:05.998425Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2796: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-11-03T14:04:05.998446Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2845: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-11-03T14:04:05.998744Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [13:1649:3347], Recipient [13:798:2653]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-11-03T14:04:05.998759Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3519: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-11-03T14:04:06.047479Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:67:2114] Handle TEvExecuteKqpTransaction 2025-11-03T14:04:06.047511Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:67:2114] TxId# 281474976715672 ProcessProposeKqpTransaction 2025-11-03T14:04:06.047712Z node 13 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976715672. 
Ctx: { TraceId: 01k950gfzg8ndkchn95xdhb5z8, Database: , SessionId: ydb://session/3?node_id=13&id=N2NhOWIwLTlhNmZiMTA2LTYwZjU5YzRlLThhOGIyYWE0, PoolId: default, DatabaseId: /Root}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-11-03T14:04:06.048176Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553215, Sender [13:1681:3373], Recipient [13:1069:2871]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-11-03T14:04:06.048229Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2561: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-11-03T14:04:06.048241Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v8001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-11-03T14:04:06.048249Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2664: 72075186224037891 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-11-03T14:04:06.048260Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-11-03T14:04:06.048282Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037891 is Executed 2025-11-03T14:04:06.048288Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-11-03T14:04:06.048294Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-11-03T14:04:06.048298Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-11-03T14:04:06.048312Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037891 2025-11-03T14:04:06.048322Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037891 is Executed 2025-11-03T14:04:06.048327Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-11-03T14:04:06.048331Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2025-11-03T14:04:06.048336Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2025-11-03T14:04:06.048353Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1657: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-11-03T14:04:06.048418Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2272: 72075186224037891 Complete read# {[13:1681:3373], 0} after executionsCount# 1 2025-11-03T14:04:06.048428Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2246: 72075186224037891 read iterator# {[13:1681:3373], 0} 
sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-11-03T14:04:06.048443Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2297: 72075186224037891 read iterator# {[13:1681:3373], 0} finished in read 2025-11-03T14:04:06.048457Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037891 is Executed 2025-11-03T14:04:06.048461Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2025-11-03T14:04:06.048466Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2025-11-03T14:04:06.048470Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2025-11-03T14:04:06.048482Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:4] at 72075186224037891 is Executed 2025-11-03T14:04:06.048486Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2025-11-03T14:04:06.048490Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:4] at 72075186224037891 has finished 2025-11-03T14:04:06.048494Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2796: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-11-03T14:04:06.048521Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2845: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2025-11-03T14:04:06.048741Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553219, Sender [13:1681:3373], Recipient [13:1069:2871]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-11-03T14:04:06.048754Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3519: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } >> ColumnBuildTest::BaseCase >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule >> KqpJoin::JoinConvert [GOOD] >> KqpJoin::JoinDupColumnRight >> CommitOffset::Commit_WithoutSession_ToPastParentPartition [GOOD] >> CommitOffset::Commit_WithSession_ToPastParentPartition >> KqpFlipJoin::RightOnly_3 [GOOD] |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 5563, MsgBus: 1149 2025-11-03T14:04:04.197725Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499646265637744:2065];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:04.197756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e09/r3tmp/tmpABw7HJ/pdisk_1.dat 2025-11-03T14:04:04.244488Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:04.244659Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:04.244872Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499646265637719:2081] 1762178644197438 != 1762178644197441 TServer::EnableGrpc on GrpcPort 5563, node 1 2025-11-03T14:04:04.253933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:04.253946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:04.253949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:04.253992Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:1149 TClient is connected to server localhost:1149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:04.301774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:04.301798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:04.302904Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:04.307580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:04.359094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:04.377290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:04.395551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:04.407830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:04.449940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:04.617588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499646265639371:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.617617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.617721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499646265639381:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.617731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.680775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.687965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.697698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.710484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.724697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.738714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.752157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.767595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:04.783027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499646265640243:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.783046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499646265640248:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.783051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.783100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499646265640251:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.783112Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.783701Z node 1 :FLAT_TX_SCHEMESHARD WARN ... pp:228: got bad distributable configuration TClient is connected to server localhost:19727 TClient is connected to server localhost:19727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:05.642062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-11-03T14:04:05.652570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.662541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:05.681486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:05.692971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:05.802784Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:05.958951Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499650095159212:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.958979Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.959084Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499650095159222:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.959097Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.969856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.978669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.991233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.006430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.020275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.033785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.047941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.061787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.077488Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499654390127381:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.077509Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.077521Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499654390127386:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.077530Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499654390127388:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.077535Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.078281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:06.089276Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499654390127390:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:04:06.154809Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499654390127442:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:06.409866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.424020Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.533447Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |56.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> KqpIndexLookupJoin::RightSemi+StreamLookup [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> KqpIndexLookupJoin::RightSemi-StreamLookup >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |56.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |56.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[distinct-distinct_having_no_agg-default.txt-Results] [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> ColumnBuildTest::CancelBuild >> TKesusTest::TestQuoterAccountLabels >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |56.3%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |56.3%| 
[LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore >> ColumnBuildTest::AlreadyExists >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 16198, MsgBus: 19880 2025-11-03T14:04:04.863005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499647996796600:2068];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:04.863222Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e03/r3tmp/tmpwWppPx/pdisk_1.dat 2025-11-03T14:04:04.889793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:04.899574Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:04.899800Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499647996796571:2081] 1762178644862520 != 1762178644862523 TServer::EnableGrpc on GrpcPort 16198, node 1 2025-11-03T14:04:04.910765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:04.910784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:04.910787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:04.910839Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:19880 TClient is connected to server localhost:19880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:04.969370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:04.969407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:04.970356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:04.971679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:04.975413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:04.977050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:04.996276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:05.014551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:05.026898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:05.153273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:05.216640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499652291765515:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.216675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.216747Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499652291765525:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.216762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.263725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.270911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.285218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.299117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.312527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.326932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.340469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.355268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:05.370969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499652291766387:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.370990Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499652291766392:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.370997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.371050Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499652291766395:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:05.371063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-11-03T14:04:06.267523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:06.270125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:06.276739Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.290484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.315920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.331921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.401747Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:06.595095Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499655343311865:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.595124Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.595259Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499655343311875:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.595265Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.607779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.616807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.629400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.643062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.657241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.670710Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.684920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.699765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.716196Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499655343312736:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.716247Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.716317Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499655343312742:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.716325Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.716325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499655343312741:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.717200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:06.727618Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499655343312745:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:04:06.826007Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499655343312797:3553] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:07.031341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.040396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.051673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.064347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.184684Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: 2025-11-03T14:03:35.877679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:35.902426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:35.904211Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.904290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.904325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001d41/r3tmp/tmpoieOJq/pdisk_1.dat 2025-11-03T14:03:35.974026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:35.974057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:35.982887Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:35.983602Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178615502561 != 1762178615502565 2025-11-03T14:03:36.014671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:36.060659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:36.092928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:36.176583Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:36.176616Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:36.176650Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:36.205708Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:36.205769Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:36.206017Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:36.206037Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:36.206115Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:36.206161Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:36.206181Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:36.206282Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:36.207303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:36.207803Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:36.207826Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:36.223262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:36.223561Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:36.223646Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:36.223715Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:36.235372Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:36.235628Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:36.235665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:36.235873Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:36.235884Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:36.235893Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:36.235973Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:36.236004Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:36.236019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:36.236121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:36.250800Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:36.250924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:36.250960Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:36.250967Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:36.250973Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:36.250981Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:36.251082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:36.251091Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:36.251252Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:36.251283Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:36.251419Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:36.251430Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:36.251439Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:36.251446Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:36.251453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:36.251460Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:36.251467Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:36.251485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:36.251491Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:36.251499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:36.251511Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:36.251516Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:36.251543Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:36.251615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:36.251630Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:36.251651Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:36.251672Z node ... (wrong shard state), code: 2029 . sessionActorId=[14:855:2685]. 2025-11-03T14:04:06.490428Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2193: SessionId: ydb://session/3?node_id=14&id=ZTBmMTI4NmYtMjA2YjczYWMtNDMyMGQ5N2UtNjMyYzUyNDU=, ActorId: [14:855:2685], ActorState: ExecuteState, TraceId: 01k950gged1mnpvep7v25wt1wz, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [14:1008:2685] from: [14:876:2685] 2025-11-03T14:04:06.490454Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 278003712, Sender [14:876:2685], Recipient [14:665:2573]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback } 2025-11-03T14:04:06.490459Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2025-11-03T14:04:06.490471Z node 14 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2025-11-03T14:04:06.490477Z node 14 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-11-03T14:04:06.490498Z node 14 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [14:1008:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950gged1mnpvep7v25wt1wz, Database: , SessionId: ydb://session/3?node_id=14&id=ZTBmMTI4NmYtMjA2YjczYWMtNDMyMGQ5N2UtNjMyYzUyNDU=, PoolId: default, DatabaseId: /Root}. UNAVAILABLE: {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } 2025-11-03T14:04:06.490540Z node 14 :KQP_SESSION WARN: kqp_session_actor.cpp:2987: SessionId: ydb://session/3?node_id=14&id=ZTBmMTI4NmYtMjA2YjczYWMtNDMyMGQ5N2UtNjMyYzUyNDU=, ActorId: [14:855:2685], ActorState: ExecuteState, TraceId: 01k950gged1mnpvep7v25wt1wz, Create QueryResponse for error on request, msg: ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR cookie 0 2025-11-03T14:04:06.491053Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 65543, Sender [14:590:2518], Recipient [14:665:2573]: NActors::TEvents::TEvPoison 2025-11-03T14:04:06.491231Z node 14 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2025-11-03T14:04:06.491244Z node 14 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2025-11-03T14:04:06.503930Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [14:1012:2814], Recipient [14:1014:2815]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:06.504356Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [14:1012:2814], Recipient [14:1014:2815]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:06.504386Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828684, Sender [14:1012:2814], Recipient [14:1014:2815]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:06.505872Z node 14 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1014:2815] 2025-11-03T14:04:06.505952Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:04:06.506437Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:04:06.506905Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:04:06.507134Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:04:06.507144Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:04:06.507150Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:04:06.507223Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:04:06.507302Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:04:06.507312Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:04:06.507327Z node 14 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state PreOffline tabletId 72075186224037888 2025-11-03T14:04:06.507354Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-11-03T14:04:06.507364Z node 14 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-11-03T14:04:06.507388Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1259: 
Change sender created: at tablet: 72075186224037888, actorId: [14:1028:2822] 2025-11-03T14:04:06.507392Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:04:06.507397Z node 14 :TX_DATASHARD INFO: datashard.cpp:1292: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-11-03T14:04:06.507400Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:06.507481Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 275709965, Sender [14:69:2116], Recipient [14:1014:2815]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2025-11-03T14:04:06.507589Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [14:1014:2815], Recipient [14:1014:2815]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:06.507598Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:06.507627Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435075, Sender [14:1014:2815], Recipient [14:1014:2815]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-11-03T14:04:06.507634Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3185: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-11-03T14:04:06.507945Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 270270976, Sender [14:26:2073], Recipient [14:1014:2815]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 500} 2025-11-03T14:04:06.507952Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-11-03T14:04:06.507957Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 500 2025-11-03T14:04:06.507962Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:06.508028Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:04:06.508033Z node 14 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037888 state 5 2025-11-03T14:04:06.508072Z node 14 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2025-11-03T14:04:06.508078Z node 14 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-11-03T14:04:06.508084Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-11-03T14:04:06.508137Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287425, Sender [14:1014:2815], Recipient [14:916:2728]: {TEvReadSet step# 400 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-11-03T14:04:06.508141Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-11-03T14:04:06.508146Z node 
14 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-11-03T14:04:06.508158Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 400 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-11-03T14:04:06.508165Z node 14 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 400:281474976715663 at 72075186224037889 2025-11-03T14:04:06.508173Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2025-11-03T14:04:06.508177Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 400 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-11-03T14:04:06.508190Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:04:06.508205Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 270270978, Sender [14:26:2073], Recipient [14:1014:2815]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 300 NextReadStep# 500 ReadStep# 500 } 2025-11-03T14:04:06.508209Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3196: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-11-03T14:04:06.508213Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 300 next step 500 2025-11-03T14:04:06.508231Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287938, Sender [14:916:2728], Recipient [14:1014:2815]: {TEvReadSet step# 400 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-11-03T14:04:06.508235Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3177: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-11-03T14:04:06.508239Z node 14 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-11-03T14:04:06.508247Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-11-03T14:04:06.715484Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH16 >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 26968, MsgBus: 20970 2025-11-03T14:03:59.559846Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499625762861407:2071];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:59.560186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e0d/r3tmp/tmpdZdIUs/pdisk_1.dat 2025-11-03T14:03:59.589034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:59.601819Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:59.601981Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499625762861374:2081] 1762178639559133 != 1762178639559136 TServer::EnableGrpc on GrpcPort 26968, node 1 2025-11-03T14:03:59.609858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:59.609869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:59.609870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:59.609898Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20970 TClient is connected to server localhost:20970 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:59.667224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:59.667255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:59.668332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:59.670010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-11-03T14:03:59.784124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:59.948185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499625762862055:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.948189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499625762862066:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.948206Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.948268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499625762862069:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.948282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:59.949036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:59.956581Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499625762862070:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:00.034432Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499630057829419:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:00.081128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:04:00.105401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:00.105465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:00.105543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:00.105578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:00.105607Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:00.105633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:00.105661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:00.105690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:00.105716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:00.105743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:00.105775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:00.105798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:00.105824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499630057829625:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:00.105823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499630057829624:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:00.105842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499630057829624:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:00.105927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499630057829624:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:00.105964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499630057829624:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLAS ... 
tion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.386403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.386410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.386414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.386420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.386422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.386495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:04.386502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:04.386508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:04.386511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:04.386523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:04.386529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:04.386535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:04.386537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:04.386541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:04.386543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:04.386546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:04.386549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:04.386562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:04.386565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:04.386572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:04.386575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.386579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:04.386581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:04.386585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.386588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.386595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.386600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.386606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.386608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.388298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7568499648624092394:2338];ev=NActors::IEventHandle;tablet_id=72075186224037895;tx_id=281474976710660;this=55599292687632;method=TTxController::StartProposeOnExecute;tx_info=281474976710660:TX_KIND_SCHEMA;min=1762178644387;max=18446744073709551615;plan=0;src=[1:7568499644329124481:2146];cookie=82:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:04:04.388541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.388572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.388609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.388630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.388648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.388667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.388690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.388709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.388736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.388764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.388790Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.388804Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.388816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7568499648624092447:2349];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; VERIFY failed (2025-11-03T14:04:04.388752Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7F5047AB0AC2 13. ??:0: ?? @ 0x7F5047B428BF |56.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_donor/test-results/unittest/{meta.json ... results_accumulator.log} >> CommitOffset::Commit_Flat_WithWrongSession_ToPast [GOOD] >> CommitOffset::Commit_WithSession_ParentNotFinished_SameSession |56.3%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |56.3%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part2/pytest >> test.py::test[aggregate-list_after_group-default.txt-Results] [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] |56.4%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH3 Test command err: Trying to start YDB, gRPC: 3185, MsgBus: 21557 2025-11-03T14:04:04.425656Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499648169968765:2069];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:04.425835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e04/r3tmp/tmpcSsvKa/pdisk_1.dat 2025-11-03T14:04:04.456090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2025-11-03T14:04:04.467095Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:04.467297Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499648169968736:2081] 1762178644425286 != 1762178644425289 TServer::EnableGrpc on GrpcPort 3185, node 1 2025-11-03T14:04:04.475152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:04.475162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:04.475164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:04.475200Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:21557 TClient is connected to server localhost:21557 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:04.533304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:04.533332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:04.534480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:04.535516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:04.642006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:04.772201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499648169969428:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.772214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499648169969417:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.772241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.772316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499648169969432:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.772336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.772862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:04.774323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499648169969431:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:04.873015Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499648169969484:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:04.928317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:04:04.949049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.949090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.949116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.949133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.949154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.949174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.949208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.949231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.949244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.949256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.949272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.949291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.949309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648169969705:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.949449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499648169969704:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.949474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499648169969704:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.949520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499648169969704:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.949542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499648169969704:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... 
ion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.980564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.980578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.980587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.980597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.980603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.980859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.980886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.980964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.981013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.981036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.981059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.981109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.981142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.981166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.981189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.981210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.981232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.981253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648169969814:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.981768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:04.981786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:04.981798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:04.981802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:04.981818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:04.981827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:04.981836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:04.981841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:04.981848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:04.981858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:04.981863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:04.981872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:04.981892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:04.981902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:04.981913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:04.981921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.981927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:04.981932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:04.981941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.981946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.981958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.981967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.981977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.981985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.983054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7568499648169969704:2336];ev=NActors::IEventHandle;tablet_id=72075186224037888;tx_id=281474976715660;this=126662732152224;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178644981;max=18446744073709551615;plan=0;src=[1:7568499648169969086:2147];cookie=12:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; VERIFY failed (2025-11-03T14:04:04.982342Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7F29E7FD1AC2 13. ??:0: ?? @ 0x7F29E80638BF ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore Test command err: Trying to start YDB, gRPC: 6484, MsgBus: 64008 2025-11-03T14:04:04.184605Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499649144926674:2075];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:04.184826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e07/r3tmp/tmpSLjBOF/pdisk_1.dat 2025-11-03T14:04:04.217093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:04.232352Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:04.232507Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499649144926637:2081] 1762178644183817 != 1762178644183820 TServer::EnableGrpc on GrpcPort 6484, node 1 2025-11-03T14:04:04.237821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:04.237839Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:04.237842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:04.237889Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64008 TClient is connected to server localhost:64008 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:04.294974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:04.295002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:04.295626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:04.295940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... 2025-11-03T14:04:04.494808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:04.535472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499649144927315:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.535474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499649144927326:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.535495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.535542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499649144927330:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.535556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.536085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:04.537438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499649144927329:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:04.603696Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499649144927382:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:04.646838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:04:04.670675Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.670747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.670793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.670823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.670853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.670896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.670920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.670939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.670957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.670971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.670988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.671008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.671026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7568499649144927598:2336];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.671201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499649144927599:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.671244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499649144927599:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.671293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499649144927599:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.671316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499649144927599:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_ ... 
ion=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.708155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.708167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.708171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.708180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.708183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.709603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.709629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.709668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.709693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.709716Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.709738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.709765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.709789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.709811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.709836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.709858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.709885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.709908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7568499649144927714:2351];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.710467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:04.710482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:04.710486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499649144927599:2337];ev=NActors::IEventHandle;tablet_id=72075186224037889;tx_id=281474976715660;this=127386434891776;method=TTxController::StartProposeOnExecute;tx_info=281474976715660:TX_KIND_SCHEMA;min=1762178644709;max=18446744073709551615;plan=0;src=[1:7568499649144926984:2147];cookie=22:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=6;result=not_found; 2025-11-03T14:04:04.710496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:04.710502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:04.710526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:04.710535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:04.710548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:04.710557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:04.710566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:04.710576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:04.710582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:04.710592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:04.710613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:04.710640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:04.710658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:04.710668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.710674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:04.710683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:04.710690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.710700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.710715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.710724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.710735Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.710743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; VERIFY failed (2025-11-03T14:04:04.710925Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7FD4DAC30AC2 13. ??:0: ?? @ 0x7FD4DACC28BF |56.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> test.py::test[join-premap_common_left_cross--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate+EnableSeparationComputeActorsFromRead Test command err: Trying to start YDB, gRPC: 14233, MsgBus: 24626 2025-11-03T14:04:04.081472Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499648946145298:2066];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:04.081655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e06/r3tmp/tmpHIrFwq/pdisk_1.dat 2025-11-03T14:04:04.117452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 14233, node 1 2025-11-03T14:04:04.135617Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:04.135922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499648946145266:2081] 1762178644081239 != 1762178644081242 2025-11-03T14:04:04.146951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:04.146969Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:04.146971Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:04.147036Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:24626 2025-11-03T14:04:04.184899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:04.184935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:04.185959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:04.201462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:04.277622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:04.431636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499648946145942:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.431643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499648946145953:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.431665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.431742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499648946145957:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.431766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:04.432454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:04.434003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499648946145956:2323], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:04.507196Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499648946146009:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:04.548358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2025-11-03T14:04:04.570822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.570884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.570920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.570939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.570957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.570974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.570994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.571012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.571034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.571055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.571073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.571092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.571109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7568499648946146218:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.571541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:04.571554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:04.571565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:04.571568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:04.571581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished; ... 
er_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:04.616025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:04.616037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.616046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.616069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.616078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.616094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.616103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.616458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:04.616471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:04.616491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:04.616505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:04.616528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:04.616539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:04.616551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:04.616556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:04.616569Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:04.616578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:04.616585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:04.616595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:04.616624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:04.616636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:04.616650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:04.616660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.616670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:04.616679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:04.616689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:04.616698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:04.616718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:04.616728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2025-11-03T14:04:04.616740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2025-11-03T14:04:04.616744Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-11-03T14:04:04.689819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:04.689865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:04.689962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:04.689988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:04.690016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:04.690049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:04.690071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:04.690105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:04.690133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:04.690150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:04.690170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:04.690190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:04.690208Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7568499648946146388:2351];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; VERIFY failed (2025-11-03T14:04:04.617814Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed 0. /-S/util/system/yassert.cpp:86: InternalPanicImpl @ 0x14E11665 1. /-S/util/system/yassert.cpp:55: Panic @ 0x14E07F06 2. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:45: TClient @ 0x17369EE2 3. /tmp//-S/ydb/core/tablet/tablet_pipe_client.cpp:733: ?? @ 0x17369D01 4. /tmp//-S/ydb/core/tx/columnshard/columnshard.cpp:510: Handle @ 0x2114AAAB 5. /-S/ydb/core/tx/columnshard/columnshard_impl.h:445: StateWork @ 0x21142789 6. /tmp//-S/ydb/library/actors/core/actor.cpp:350: Receive @ 0x15F9EE67 7. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:267: Execute @ 0x15FED664 8. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:455: operator() @ 0x15FF1794 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:507: ProcessExecutorPool @ 0x15FF13A0 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:533: ThreadProc @ 0x15FF1F8E 11. /-S/util/system/thread.cpp:245: ThreadProxy @ 0x14E135BC 12. ??:0: ?? @ 0x7F2507A55AC2 13. ??:0: ?? @ 0x7F2507AE78BF ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 64336, MsgBus: 11794 2025-11-03T14:04:05.821071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499653119568747:2067];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:05.821174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001dfb/r3tmp/tmpcWQpcE/pdisk_1.dat 2025-11-03T14:04:05.868752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:05.869353Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:05.869555Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499653119568720:2081] 1762178645820836 != 1762178645820839 TServer::EnableGrpc on GrpcPort 64336, node 1 2025-11-03T14:04:05.878592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:05.878609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:05.878611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:05.878677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:11794 2025-11-03T14:04:05.924306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:05.924338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-11-03T14:04:05.925393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:05.942056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:05.960165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:05.981232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.006413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.020979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:06.068828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:06.245884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499657414537662:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.245921Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.246020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499657414537672:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.246027Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.307618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.315269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.327992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.341748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.355996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.369891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.383900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.398376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.415026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499657414538534:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.415049Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.415058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499657414538539:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.415082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499657414538541:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.415087Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.415786Z node 1 :FLAT_TX_SCHEMESHARD ... 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:07.463096Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:07.472541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:07.487843Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:07.509043Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:07.521758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:07.668491Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:07.877446Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499660137950067:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.877575Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.880219Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499660137950150:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.880233Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.880937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.888803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.903225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.916524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.931086Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.945034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.959391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.973830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.990332Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7568499660137950939:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.990372Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.990381Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499660137950944:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.990407Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7568499660137950946:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.990414Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.991280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:07.999905Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499660137950947:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:04:08.066414Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499664432918296:3552] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:08.347306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.356210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.365100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.386268Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-11-03T14:04:06.716140Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499656191057580:2083];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:06.716549Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:06.721423Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499655075266437:2183];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025b6/r3tmp/tmpp9OSro/pdisk_1.dat 2025-11-03T14:04:06.723756Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:06.725845Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:06.725932Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:06.767103Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:06.767848Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:06.777035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:06.777065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:06.782504Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:06.789122Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:06.791473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20712, node 1 2025-11-03T14:04:06.799960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025b6/r3tmp/yandexXTpdH7.tmp 2025-11-03T14:04:06.799972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025b6/r3tmp/yandexXTpdH7.tmp 2025-11-03T14:04:06.803245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025b6/r3tmp/yandexXTpdH7.tmp 2025-11-03T14:04:06.803299Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:06.804354Z INFO: TTestServer started on Port 5740 GrpcPort 20712 2025-11-03T14:04:06.818832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:06.818875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:5740 2025-11-03T14:04:06.820588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:20712 === TenantModeEnabled() = 1 === Init PQ - start server on port 20712 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:06.846972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-11-03T14:04:06.847043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:06.847114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:04:06.847127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:04:06.847204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:04:06.847224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:06.848069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:06.848134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:04:06.848176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:06.848189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:04:06.848192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: 
TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-11-03T14:04:06.848195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-11-03T14:04:06.848721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:06.848756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-11-03T14:04:06.848762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710657:0 3 -> 128 2025-11-03T14:04:06.848865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:06.848876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-11-03T14:04:06.848881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:06.849278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:06.849289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:06.849294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-11-03T14:04:06.849299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-11-03T14:04:06.850237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:06.850799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-11-03T14:04:06.850861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:04:06.851618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1762178646900, transactions count in step: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:06.851663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1762178646900 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 
2025-11-03T14:04:06.851675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-11-03T14:04:06.851760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710657:0 128 -> 240 2025-11-03T14:04:06.851774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-11-03T14:04:06.851814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-11-03T14:04:06.85 ... MISSIONS 2025-11-03T14:04:08.776266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976720665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:53892" , at schemeshard: 72057594046644480 2025-11-03T14:04:08.776314Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720665:0, at schemeshard: 72057594046644480 2025-11-03T14:04:08.776345Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5606: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-11-03T14:04:08.776351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5622: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-11-03T14:04:08.776390Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-11-03T14:04:08.776401Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:08.776418Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720665:0 progress is 1/1 2025-11-03T14:04:08.776420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-11-03T14:04:08.776424Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720665:0 progress is 1/1 2025-11-03T14:04:08.776425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 
2025-11-03T14:04:08.776436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-11-03T14:04:08.776447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720665, ready parts: 1/1, is published: false 2025-11-03T14:04:08.776452Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-11-03T14:04:08.776454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-11-03T14:04:08.776457Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720665:0 2025-11-03T14:04:08.776461Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976720665, publications: 1, subscribers: 0 2025-11-03T14:04:08.776463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976720665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-11-03T14:04:08.777387Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720665, response: Status: StatusSuccess TxId: 281474976720665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-11-03T14:04:08.777473Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-11-03T14:04:08.777521Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-11-03T14:04:08.777529Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-11-03T14:04:08.777581Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-11-03T14:04:08.777589Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7568499667103304063:2373], at schemeshard: 72057594046644480, txId: 281474976720665, path id: 10 2025-11-03T14:04:08.777732Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-11-03T14:04:08.777753Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-11-03T14:04:08.777769Z 
node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720665 2025-11-03T14:04:08.777772Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-11-03T14:04:08.777776Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-11-03T14:04:08.777799Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720665, subscribers: 0 2025-11-03T14:04:08.778139Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-11-03T14:04:08.778150Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2025-11-03T14:04:08.778239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720665 2025-11-03T14:04:08.778276Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-11-03T14:04:08.778296Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:53882 2025-11-03T14:04:08.778306Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:53882 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-11-03T14:04:08.778309Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-11-03T14:04:08.778518Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2025-11-03T14:04:08.778568Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-11-03T14:04:08.778574Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-11-03T14:04:08.778576Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = 
$Partition; 2025-11-03T14:04:08.778589Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7568499667103305110:2375] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-11-03T14:04:08.778594Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-11-03T14:04:08.778734Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-11-03T14:04:08.778773Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|4c5c8e94-afb2e94e-1348aac6-319eb546_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-11-03T14:04:08.778946Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|4c5c8e94-afb2e94e-1348aac6-319eb546_0 2025-11-03T14:04:08.779257Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|4c5c8e94-afb2e94e-1348aac6-319eb546_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-11-03T14:04:08.779303Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|4c5c8e94-afb2e94e-1348aac6-319eb546_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-11-03T14:04:08.779313Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|4c5c8e94-afb2e94e-1348aac6-319eb546_0 2025-11-03T14:04:08.779360Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|4c5c8e94-afb2e94e-1348aac6-319eb546_0 is DEAD 2025-11-03T14:04:08.779430Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> BasicStatistics::Serverless [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] |56.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest |56.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |56.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |56.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[ql_filter-integer_many_noskiff--ForceBlocks] [GOOD] >> KqpIndexLookupJoin::RightSemi-StreamLookup [GOOD] >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream >> ColumnBuildTest::BaseCase [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless 
[GOOD] Test command err: 2025-11-03T14:03:38.783536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:38.806856Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:38.806906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:38.806926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0020c9/r3tmp/tmpBk2stw/pdisk_1.dat 2025-11-03T14:03:38.891252Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:38.924499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:38.924531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:38.946027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30507, node 1 2025-11-03T14:03:39.005055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:39.005075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:39.005079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:39.005197Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:03:39.007696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:39.043057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:65072 2025-11-03T14:03:39.363042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:40.234933Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:40.237283Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-11-03T14:03:40.238419Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:40.248360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:40.248401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:40.282197Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:03:40.282755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:40.353694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:40.353732Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:40.354095Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354271Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354408Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354601Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354679Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354723Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354800Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354834Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.354856Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:40.367006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:40.413848Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:40.420089Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-11-03T14:03:40.420118Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-11-03T14:03:40.425980Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-11-03T14:03:40.426150Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-11-03T14:03:40.426168Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-11-03T14:03:40.426173Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-11-03T14:03:40.426177Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-11-03T14:03:40.426182Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-11-03T14:03:40.426186Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-11-03T14:03:40.426192Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-11-03T14:03:40.426333Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-11-03T14:03:40.428292Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8310: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:40.428312Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8340: ConnectToSA(), pipe client id: [2:1827:2584], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:40.429709Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1850:2598] 2025-11-03T14:03:40.429755Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1850:2598], schemeshard id = 72075186224037897 2025-11-03T14:03:40.442047Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1879:2606] 2025-11-03T14:03:40.442673Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-11-03T14:03:40.445469Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-11-03T14:03:40.445480Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-11-03T14:03:40.445491Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-11-03T14:03:40.446827Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:1963:2646], domain# [OwnerId: 72057594046644480, LocalPathId: 2], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-11-03T14:03:40.447590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:40.449310Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-11-03T14:03:40.449343Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-11-03T14:03:40.536595Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:40.589677Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-11-03T14:03:40.613176Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-11-03T14:03:40.680637Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 
2025-11-03T14:03:40.785827Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-11-03T14:03:41.515629Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:41.539001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:42.061757Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:42.077042Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224 ... uestId[ 29 ] 2025-11-03T14:04:04.201610Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 29, ReplyToActorId = [2:3935:3456], StatRequests.size() = 1 2025-11-03T14:04:04.222502Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:04.222536Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-11-03T14:04:04.222544Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-11-03T14:04:04.222548Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-11-03T14:04:04.222706Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:3937:3458], ActorId: [2:3938:3459], Starting query actor #1 [2:3939:3460] 2025-11-03T14:04:04.222721Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:3938:3459], ActorId: [2:3939:3460], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2025-11-03T14:04:04.225542Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:3938:3459], ActorId: [2:3939:3460], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGFmYmI2NDQtYjllZTg4ODEtNjViMzYwYTMtMjMzZDhjODU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:04.232091Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:3948:3469]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:04.232168Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-11-03T14:04:04.232214Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:177: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-11-03T14:04:04.232219Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-11-03T14:04:04.232246Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-11-03T14:04:04.232259Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 30, ReplyToActorId = [2:3948:3469], StatRequests.size() = 1 2025-11-03T14:04:04.232281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-11-03T14:04:04.249198Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:3938:3459], ActorId: [2:3939:3460], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGFmYmI2NDQtYjllZTg4ODEtNjViMzYwYTMtMjMzZDhjODU=, TxId: 2025-11-03T14:04:04.249226Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:3938:3459], ActorId: [2:3939:3460], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGFmYmI2NDQtYjllZTg4ODEtNjViMzYwYTMtMjMzZDhjODU=, TxId: 2025-11-03T14:04:04.249386Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:3937:3458], ActorId: [2:3938:3459], Got response [2:3939:3460] SUCCESS 2025-11-03T14:04:04.249452Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:04.260909Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-11-03T14:04:04.260936Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-11-03T14:04:04.344170Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-11-03T14:04:04.344213Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-11-03T14:04:04.417678Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3001:3064], schemeshard count = 1 2025-11-03T14:04:04.730455Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-11-03T14:04:04.730488Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.697000s, at schemeshard: 72075186224037899 2025-11-03T14:04:04.730565Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2025-11-03T14:04:04.741962Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-11-03T14:04:05.206006Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4008:3502]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:05.206159Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-11-03T14:04:05.206169Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 31, ReplyToActorId = [2:4008:3502], StatRequests.size() = 1 2025-11-03T14:04:06.118003Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4041:3517]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:06.118129Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-11-03T14:04:06.118141Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 32, ReplyToActorId = [2:4041:3517], StatRequests.size() = 1 2025-11-03T14:04:06.139149Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-11-03T14:04:06.139205Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:06.139211Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-11-03T14:04:06.139221Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-11-03T14:04:06.139227Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-11-03T14:04:06.139316Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4043:3519], ActorId: [2:4044:3520], Starting query actor #1 [2:4045:3521] 2025-11-03T14:04:06.139329Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4044:3520], ActorId: [2:4045:3521], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2025-11-03T14:04:06.140157Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4044:3520], ActorId: [2:4045:3521], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzBkYmQ2ZWItMmZjZmUwZTItZjNlODNhNDctOWUyZWFmZDI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:06.151299Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4044:3520], ActorId: [2:4045:3521], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzBkYmQ2ZWItMmZjZmUwZTItZjNlODNhNDctOWUyZWFmZDI=, TxId: 2025-11-03T14:04:06.151330Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4044:3520], ActorId: [2:4045:3521], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzBkYmQ2ZWItMmZjZmUwZTItZjNlODNhNDctOWUyZWFmZDI=, TxId: 2025-11-03T14:04:06.151443Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4043:3519], ActorId: [2:4044:3520], Got response [2:4045:3521] SUCCESS 2025-11-03T14:04:06.151699Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:06.163280Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-11-03T14:04:06.163305Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-11-03T14:04:07.120671Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4105:3550]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:07.120798Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-11-03T14:04:07.120808Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 33, ReplyToActorId = [2:4105:3550], StatRequests.size() = 1 2025-11-03T14:04:08.209364Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4138:3566]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:08.209468Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-11-03T14:04:08.209478Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 34, ReplyToActorId = [2:4138:3566], StatRequests.size() = 1 2025-11-03T14:04:08.230277Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-11-03T14:04:08.230366Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 7 2025-11-03T14:04:08.230424Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:08.230430Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-11-03T14:04:08.230520Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 0 2025-11-03T14:04:08.230590Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-11-03T14:04:08.251449Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-11-03T14:04:08.251480Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-11-03T14:04:08.251567Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-11-03T14:04:08.262975Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-11-03T14:04:09.181607Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4170:3580]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:09.181760Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2025-11-03T14:04:09.181770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 35, ReplyToActorId = [2:4170:3580], StatRequests.size() = 1 |56.4%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_rtmr_reboots/unittest |56.4%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/dq_file/part13/pytest >> test.py::test[join-premap_common_left_cross--Results] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-11-03T14:04:07.227781Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499659393309955:2156];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:07.227872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:07.232750Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499659079976892:2087];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:07.232966Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025ec/r3tmp/tmpOxm9O0/pdisk_1.dat 2025-11-03T14:04:07.235410Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:07.238682Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:07.280945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:07.296939Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.296970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:07.297749Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:07.297837Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:07.298452Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:07.298787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18493, node 1 2025-11-03T14:04:07.313591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025ec/r3tmp/yandexrYwcJ6.tmp 2025-11-03T14:04:07.313609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025ec/r3tmp/yandexrYwcJ6.tmp 2025-11-03T14:04:07.313696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025ec/r3tmp/yandexrYwcJ6.tmp 2025-11-03T14:04:07.313754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:07.318081Z INFO: TTestServer started on Port 61539 GrpcPort 18493 2025-11-03T14:04:07.329525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.329561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:07.331077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61539 PQClient connected to localhost:18493 === TenantModeEnabled() = 1 === Init PQ - start server on port 18493 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:07.365764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-11-03T14:04:07.365847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.365916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:04:07.365931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:04:07.365999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:04:07.366043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:07.366777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:07.366865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:04:07.366920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.366936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:04:07.366939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-11-03T14:04:07.366943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-11-03T14:04:07.367496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.367516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-11-03T14:04:07.367521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710657:0 3 -> 128 2025-11-03T14:04:07.367639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:07.367655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-11-03T14:04:07.367659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:07.368114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.368127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.368130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.368135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-11-03T14:04:07.368928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:07.369550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-11-03T14:04:07.369605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:04:07.370314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1762178647418, transactions count in step: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:07.370361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1762178647418 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-11-03T14:04:07.370375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.370459Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976710657:0 128 -> 240 2025-11-03T14:04:07.370471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.370502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-11-03T14:0 ... FY PERMISSIONS 2025-11-03T14:04:09.337077Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976720665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:38588" , at schemeshard: 72057594046644480 2025-11-03T14:04:09.337146Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720665:0, at schemeshard: 72057594046644480 2025-11-03T14:04:09.337187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5606: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-11-03T14:04:09.337194Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5622: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-11-03T14:04:09.337242Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-11-03T14:04:09.337257Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:09.337285Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720665:0 progress is 1/1 2025-11-03T14:04:09.337292Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-11-03T14:04:09.337302Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976720665:0 progress is 1/1 2025-11-03T14:04:09.337305Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-11-03T14:04:09.337321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-11-03T14:04:09.337338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720665, ready parts: 1/1, is published: false 2025-11-03T14:04:09.337354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-11-03T14:04:09.337362Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-11-03T14:04:09.337367Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976720665:0 2025-11-03T14:04:09.337372Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976720665, publications: 1, subscribers: 0 2025-11-03T14:04:09.337375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976720665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-11-03T14:04:09.338502Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720665, response: Status: StatusSuccess TxId: 281474976720665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-11-03T14:04:09.338598Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-11-03T14:04:09.338679Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-11-03T14:04:09.338688Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-11-03T14:04:09.338746Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-11-03T14:04:09.338755Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7568499664975578416:2378], at schemeshard: 72057594046644480, txId: 281474976720665, path id: 10 2025-11-03T14:04:09.338941Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-11-03T14:04:09.338959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-11-03T14:04:09.338966Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720665 2025-11-03T14:04:09.338971Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-11-03T14:04:09.338976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-11-03T14:04:09.339010Z node 3 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720665, subscribers: 0 2025-11-03T14:04:09.339584Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720665 2025-11-03T14:04:09.342908Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2025-11-03T14:04:09.342923Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2025-11-03T14:04:09.343108Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-11-03T14:04:09.343154Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:38582 2025-11-03T14:04:09.343166Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:38582 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-11-03T14:04:09.343170Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-11-03T14:04:09.343489Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2025-11-03T14:04:09.343546Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-11-03T14:04:09.343553Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-11-03T14:04:09.343556Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-11-03T14:04:09.343572Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7568499669270546635:2367] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-11-03T14:04:09.343581Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-11-03T14:04:09.344193Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:822: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 4, Generation: 1 2025-11-03T14:04:09.344447Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-message-group|a46de7e7-cac88d42-edeef9ac-c6a0e0b1_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-11-03T14:04:09.344819Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|a46de7e7-cac88d42-edeef9ac-c6a0e0b1_0 2025-11-03T14:04:09.345307Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group|a46de7e7-cac88d42-edeef9ac-c6a0e0b1_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-11-03T14:04:09.345441Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1352: updating token 2025-11-03T14:04:09.345458Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2025-11-03T14:04:09.345701Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: test-message-group|a46de7e7-cac88d42-edeef9ac-c6a0e0b1_0 describe result for acl check 2025-11-03T14:04:09.345727Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|a46de7e7-cac88d42-edeef9ac-c6a0e0b1_0 2025-11-03T14:04:09.345844Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group|a46de7e7-cac88d42-edeef9ac-c6a0e0b1_0 is DEAD 2025-11-03T14:04:09.345948Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-11-03T14:04:09.682493Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession >> ColumnBuildTest::CancelBuild [GOOD] >> test.py::test[window-lagging/aggregations--Results] [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> ColumnBuildTest::AlreadyExists [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::RightSemi-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 32021, MsgBus: 62838 2025-11-03T14:04:05.801620Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499651263879280:2068];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:05.801905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001df8/r3tmp/tmpEzICAp/pdisk_1.dat 2025-11-03T14:04:05.848699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2025-11-03T14:04:05.861127Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:05.861301Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499651263879252:2081] 1762178645801112 != 1762178645801115 TServer::EnableGrpc on GrpcPort 32021, node 1 2025-11-03T14:04:05.870687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:05.870703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:05.870706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:05.870759Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:62838 2025-11-03T14:04:05.909873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:05.909908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:05.910989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:05.932907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:05.935988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:05.962325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:05.982534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.006490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.020000Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:06.022564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:06.261863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499655558848195:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.261906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.262085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499655558848205:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.262093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.325888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.335953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.348787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.362834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.376635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.390964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.405204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.419033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:06.435959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499655558849067:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.435999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499655558849072:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.436005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.436056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499655558849075:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:06.436069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, ... : "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:08.920187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.929260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.939259Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.953389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.967003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.981954Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.996752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.003081Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:09.011344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.023870Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.037472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.051319Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.065411Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.083148Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.114428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:09.123596Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32)))))) (let $1 (KqpTable 
'"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Fk") '() (Void) '())) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1207) '('"_id" '"37e6ab9d-6266c92a-d38e9e68-35e79d71") $3)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($25) $25) $4)) (let $6 (DqCnResult (TDqOutput $5 '"0") '())) (let $7 '('('"type" '"generic"))) (let $8 (KqpPhysicalTx '($5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (OptionalType (DataType 'Int32))) (let $11 (ListType (StructType '('"Fk" $10)))) (let $12 '('('"_logical_id" '986) '('"_id" '"56e31a49-8759278a-952e8b84-b284d6d9") $3)) (let $13 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey %kqp%tx_result_binding_0_0 (lambda '($26) (Member $26 '"Fk")) (Void) (Void) (lambda '($27) (Map (Filter (FlatMap $27 (lambda '($28) (Take (Nth $28 '1) (Uint64 '1)))) (lambda '($29) (Exists (Member $29 '"Fk")))) (lambda '($30) (AsStruct '('"Key" (Member $30 '"Fk"))))))))) $12)) (let $14 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $15 (KqpCnStreamLookup (TDqOutput $13 '"0") $14 '('"Key" '"Value") (ListType (StructType '('"Key" $10))) '('('"Strategy" '"LookupRows")))) (let $16 (StructType '('"r.Key" $10) '('"r.Value" (OptionalType (DataType 'String))))) (let $17 '('('"_logical_id" '1164) '('"_id" '"472144dc-141f1be8-f5cf8e24-f4da3472") '('"_wide_channels" $16))) (let $18 (DqPhyStage '($15) (lambda '($31) (block '( (let $32 (lambda '($36) (Member $36 '"r.Key") (Member $36 '"r.Value"))) (return (FromFlow (ExpandMap (ToFlow (Sort (Map (Filter $31 (lambda '($33) (And (Exists (Member $33 '"Key")) (Coalesce (!= (Member $33 '"Value") (String '"Value22")) (Bool 'false))))) (lambda '($34) (AsStruct '('"r.Key" (Member $34 '"Key")) '('"r.Value" (Member $34 '"Value"))))) (Bool 'true) (lambda '($35) (Member $35 '"r.Key")))) $32))) ))) $17)) (let $19 (DqCnMerge (TDqOutput $18 '"0") '('('"0" '"Asc")))) (let $20 (DqPhyStage '($19) (lambda '($37) (FromFlow (NarrowMap (ToFlow $37) (lambda '($38 $39) (AsStruct '('"r.Key" $38) '('"r.Value" $39)))))) '('('"_logical_id" '1176) '('"_id" '"318deab6-ec61b58-cd9eba78-27d2ac5")))) (let $21 '($13 $18 $20)) (let $22 (DqCnResult (TDqOutput $20 '"0") '('"r.Key" '"r.Value"))) (let $23 (KqpTxResultBinding $11 '"0" '"0")) (let $24 (KqpPhysicalTx $21 '($22) '('($9 $23)) $7)) (return (KqpPhysicalQuery '($8 $24) '((KqpTxResultBinding (ListType $16) '1 '"0")) '('('"type" '"query")))) ) ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"Fk" (OptionalType (DataType 'Int32)))))) (let $1 (KqpTable '"/Root/Left" '"72057594046644480:18" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Fk") '() (Void) '())) (let $3 '('"_partition_mode" '"single")) (let $4 '('('"_logical_id" '1240) '('"_id" '"ea70b9a-7bd3a7cd-511adad-b012a97a") $3)) (let $5 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($26) $26) $4)) (let $6 (DqCnResult (TDqOutput $5 '0) '())) (let $7 '('('"type" '"data"))) (let $8 (KqpPhysicalTx '($5) '($6) '() $7)) (let $9 '"%kqp%tx_result_binding_0_0") (let $10 (OptionalType (DataType 'Int32))) (let $11 (ListType (StructType '('"Fk" $10)))) (let $12 '('('"_logical_id" '1017) '('"_id" '"d3f22d41-ce48d2c7-8ee92c26-3a5a6300") $3)) (let $13 (DqPhyStage '() (lambda '() (Iterator (PartitionByKey %kqp%tx_result_binding_0_0 (lambda '($27) (Member $27 '"Fk")) (Void) (Void) (lambda '($28) (Map (Filter (FlatMap $28 (lambda '($29) (Take (Nth $29 '1) (Uint64 '1)))) (lambda '($30) (Exists (Member $30 '"Fk")))) 
(lambda '($31) (AsStruct '('"Key" (Member $31 '"Fk"))))))))) $12)) (let $14 (KqpTable '"/Root/Right" '"72057594046644480:19" '"" '1)) (let $15 (KqpCnStreamLookup (TDqOutput $13 '0) $14 '('"Key" '"Value") (ListType (StructType '('"Key" $10))) '('('"Strategy" '"LookupRows")))) (let $16 (Uint64 '"1001")) (let $17 (StructType '('"r.Key" $10) '('"r.Value" (OptionalType (DataType 'String))))) (let $18 '('('"_logical_id" '1195) '('"_id" '"82c4d2b-23887af3-f11e282c-4e4c13fb") '('"_wide_channels" $17))) (let $19 (DqPhyStage '($15) (lambda '($32) (block '( (let $33 (lambda '($37) (Member $37 '"r.Key") (Member $37 '"r.Value"))) (return (FromFlow (ExpandMap (ToFlow (TopSort (Map (Filter $32 (lambda '($34) (And (Exists (Member $34 '"Key")) (Coalesce (!= (Member $34 '"Value") (String '"Value22")) (Bool 'false))))) (lambda '($35) (AsStruct '('"r.Key" (Member $35 '"Key")) '('"r.Value" (Member $35 '"Value"))))) $16 (Bool 'true) (lambda '($36) (Member $36 '"r.Key")))) $33))) ))) $18)) (let $20 (DqCnMerge (TDqOutput $19 '0) '('('0 '"Asc")))) (let $21 (DqPhyStage '($20) (lambda '($38) (FromFlow (NarrowMap (Take (ToFlow $38) $16) (lambda '($39 $40) (AsStruct '('"r.Key" $39) '('"r.Value" $40)))))) '('('"_logical_id" '1208) '('"_id" '"bce6c2cd-49fc3a97-9c2708ff-498d23c")))) (let $22 '($13 $19 $21)) (let $23 (DqCnResult (TDqOutput $21 '0) '('"r.Key" '"r.Value"))) (let $24 (KqpTxResultBinding $11 '0 '0)) (let $25 (KqpPhysicalTx $22 '($23) '('($9 $24)) $7)) (return (KqpPhysicalQuery '($8 $25) '((KqpTxResultBinding (ListType $17) '1 '0)) '('('"type" '"data_query")))) ) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:07.268549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:07.268577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:07.268584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:07.268592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:07.268600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:07.268605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:07.268615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:07.268629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:07.268764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:07.268828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:07.288724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:07.288749Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:07.292405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:07.292487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:07.292530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:07.294738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:07.294854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:07.294972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:07.295196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:07.295818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:07.295871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:07.296175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:07.296189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:07.296218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:07.296227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:07.296232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:07.296312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:07.297516Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:07.323468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain 
SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:07.323562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:07.323628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:07.323638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:07.323722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:07.323740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:07.327025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:07.327092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:07.327172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:07.327187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:07.327194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:07.327201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:07.331035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:07.331063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:07.331072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:07.335848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:07.335878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:07.335886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:07.335897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:07.336741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:07.339227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:07.339330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:07.339620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:07.339668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:07.339678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:07.339769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:07.339781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:07.339820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:07.339835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:07.340480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:07.340491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
ress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-11-03T14:04:09.967848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-11-03T14:04:09.967862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725761:0 progress is 1/1 2025-11-03T14:04:09.967867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-11-03T14:04:09.967873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725761:0 progress is 1/1 2025-11-03T14:04:09.967877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-11-03T14:04:09.967883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-11-03T14:04:09.967894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:567:2507] message: TxId: 281474976725761 2025-11-03T14:04:09.967899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-11-03T14:04:09.967902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976725761:0 2025-11-03T14:04:09.967906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976725761:0 2025-11-03T14:04:09.967915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-11-03T14:04:09.968397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-11-03T14:04:09.968411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976725761 2025-11-03T14:04:09.968424Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2480: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2025-11-03T14:04:09.968449Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2483: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1151:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 
UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0, Billed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0}, txId# 281474976725761 2025-11-03T14:04:09.968840Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1518: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking 2025-11-03T14:04:09.968866Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1519: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1151:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0, Billed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0} 2025-11-03T14:04:09.968875Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-11-03T14:04:09.969225Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1518: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done 2025-11-03T14:04:09.969244Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1519: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Done TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1151:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0, Billed: UploadRows: 101 UploadBytes: 2424 ReadRows: 101 ReadBytes: 2424 CpuTimeUs: 0} 2025-11-03T14:04:09.969251Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2025-11-03T14:04:09.969270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 
2025-11-03T14:04:09.969278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1171:3040] TestWaitNotification: OK eventTxId 106 2025-11-03T14:04:09.969677Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-11-03T14:04:09.969741Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:103: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2025-11-03T14:04:09.969905Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-11-03T14:04:09.969945Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 43us result status StatusSuccess 2025-11-03T14:04:09.970066Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 
Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> TSchemeShardCheckProposeSize::CopyTable >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> TSequence::CreateSequenceParallel >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TSchemeShardTest::RmDirTwice |56.4%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> BasicStatistics::ServerlessGlobalIndex [GOOD] >> TIncrementalRestoreTests::MultipleCollectionsGenerateMultipleTEvRunIncrementalRestoreEvents >> IndexBuildTestReboots::DropIndexWithDataColumnsUniq |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |56.5%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[type_v3-mergejoin_with_sort--Results] [GOOD] |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-11-03T14:04:07.011232Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499662372756424:2165];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:07.011372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:07.015466Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499658735935721:2086];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:07.015609Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025ce/r3tmp/tmpu6ADgu/pdisk_1.dat 2025-11-03T14:04:07.017664Z node 2 :PQ_READ_PROXY DEBUG: 
caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:07.015399Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:07.048679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:07.048862Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:07.060972Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16425, node 1 2025-11-03T14:04:07.087170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025ce/r3tmp/yandexuXLD2v.tmp 2025-11-03T14:04:07.087186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025ce/r3tmp/yandexuXLD2v.tmp 2025-11-03T14:04:07.089212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025ce/r3tmp/yandexuXLD2v.tmp 2025-11-03T14:04:07.089291Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:07.091437Z INFO: TTestServer started on Port 16563 GrpcPort 16425 TClient is connected to server localhost:16563 PQClient connected to localhost:16425 === TenantModeEnabled() = 1 === Init PQ - start server on port 16425 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-11-03T14:04:07.111930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.111964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient::Ls response: 2025-11-03T14:04:07.113510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:07.119967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.119994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:07.121407Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:07.121644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:07.134325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-11-03T14:04:07.134386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.134441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:04:07.134453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:04:07.134522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:04:07.134537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:07.135223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:07.135286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:04:07.135327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.135339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:04:07.135346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-11-03T14:04:07.135350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 2 -> 3 waiting... 
2025-11-03T14:04:07.135809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-11-03T14:04:07.135823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-11-03T14:04:07.135826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-11-03T14:04:07.135921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.135937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-11-03T14:04:07.135941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 3 -> 128 2025-11-03T14:04:07.136378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.136388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.136392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.136397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-11-03T14:04:07.136996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:07.137415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-11-03T14:04:07.137447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:04:07.138019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1762178647187, transactions count in step: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:07.138042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1762178647187 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-11-03T14:04:07.138051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.138117Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 128 -> 240 2025-11-03T14:04:07.138127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.138155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-11-03T14:04:07. ... on: 0 MaxSeqNo: 2 sessionId: 123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0 2025-11-03T14:04:09.736684Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1762178649736 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:04:09.736719Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0" topic: "PQ/account/topic" 2025-11-03T14:04:09.736862Z :DEBUG: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write 1 messages with Id from 1 to 1 2025-11-03T14:04:09.736886Z :DEBUG: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session: try to update token 2025-11-03T14:04:09.736893Z :DEBUG: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Send 1 message(s) (0 left), first sequence number is 3 2025-11-03T14:04:09.736978Z :INFO: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session: close. Timeout = 10000 ms 2025-11-03T14:04:09.737144Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: 123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0 grpc read done: success: 1 data: write_request[data omitted] 2025-11-03T14:04:09.737278Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-11-03T14:04:09.737450Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic' requestId: 2025-11-03T14:04:09.737467Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2635: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-11-03T14:04:09.737495Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-11-03T14:04:09.737774Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::IEventHandle 2025-11-03T14:04:09.738103Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:146: Handle TEvRequest topic: 'topic' requestId: 2025-11-03T14:04:09.738122Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2635: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-11-03T14:04:09.738140Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2011: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-11-03T14:04:09.738152Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:637: [72075186224037893][Partition][0][StateIdle] Received TPartition::TEvWrite 2025-11-03T14:04:09.738184Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1756: [72075186224037893][Partition][0][StateIdle] Send write quota request. 
Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-11-03T14:04:09.738206Z node 4 :PERSQUEUE DEBUG: partition.cpp:4052: [72075186224037893][Partition][0][StateIdle] Got quota. Topic: "PQ/account/topic". Partition: 0: Cookie: 3 2025-11-03T14:04:09.738218Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.738227Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.738232Z node 4 :PERSQUEUE DEBUG: partition.cpp:2302: [72075186224037893][Partition][0][StateIdle] Batch completed (1) 2025-11-03T14:04:09.738268Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1319: [72075186224037893][Partition][0][StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-11-03T14:04:09.738329Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1423: [72075186224037893][Partition][0][StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-11-03T14:04:09.738342Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:04:09.738402Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1675: [72075186224037893][Partition][0][StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 2 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000? size 431 WTime 1762178649738 2025-11-03T14:04:09.738450Z node 4 :PERSQUEUE DEBUG: read.h:275: [72075186224037893][PQCacheProxy]CacheProxy. Passthrough write request to KV 2025-11-03T14:04:09.738469Z node 4 :PERSQUEUE DEBUG: read.h:313: [72075186224037893][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-11-03T14:04:09.739925Z node 4 :PERSQUEUE DEBUG: cache_eviction.h:321: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [4:7568499669253637281:2308] 2025-11-03T14:04:09.739956Z node 4 :PERSQUEUE DEBUG: partition.cpp:2116: [72075186224037893][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2025-11-03T14:04:09.739977Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:574: [72075186224037893][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-11-03T14:04:09.739977Z node 4 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 431 2025-11-03T14:04:09.739995Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:58: [72075186224037893][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-11-03T14:04:09.740007Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:361: [72075186224037893][Partition][0][StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-11-03T14:04:09.740046Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.740054Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.740073Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:04:09.740089Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2025-11-03T14:04:09.740101Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:181: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-11-03T14:04:09.740285Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::IEventHandle 2025-11-03T14:04:09.740688Z :DEBUG: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 1 } 2025-11-03T14:04:09.740708Z :DEBUG: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session: acknoledged message 1 2025-11-03T14:04:09.772625Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.772652Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.772658Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037889][Partition][0][StateIdle] Try persist 2025-11-03T14:04:09.784435Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037891][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.784468Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037891][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.784479Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037891][Partition][0][StateIdle] Try persist 2025-11-03T14:04:09.791038Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.791054Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.791056Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:04:09.792557Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:09.794065Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:09.837065Z :INFO: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session will now close 2025-11-03T14:04:09.837087Z :DEBUG: [] MessageGroupId 
[123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session: aborting 2025-11-03T14:04:09.837277Z :INFO: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session: gracefully shut down, all writes complete 2025-11-03T14:04:09.837287Z :DEBUG: [] MessageGroupId [123] SessionId [123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0] Write session: destroy 2025-11-03T14:04:09.837476Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: 123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0 grpc read done: success: 0 data: 2025-11-03T14:04:09.837495Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: 123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0 grpc read failed 2025-11-03T14:04:09.837504Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: 123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0 grpc closed 2025-11-03T14:04:09.837507Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: 123|fafb29d-a4aeda2e-afeb9a5a-5ae9f84d_0 is DEAD 2025-11-03T14:04:09.837839Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-11-03T14:04:09.838011Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037893] server disconnected, pipe [3:7568499671022375071:2392] destroyed 2025-11-03T14:04:09.838042Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037893][Partition][0][StateIdle] TPartition::DropOwner. 2025-11-03T14:04:09.838051Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037893][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.838055Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037893][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.838058Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037893][Partition][0][StateIdle] Try persist 2025-11-03T14:04:09.872960Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037889][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:09.872978Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037889][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:09.872981Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037889][Partition][0][StateIdle] Try persist >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:08.646836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:08.646867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:08.646875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:08.646884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:08.646898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:08.646903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:08.646914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:08.646932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:08.647065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:08.647143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:08.663336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:08.663364Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:08.666977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:08.667079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:08.667131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:08.669116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:08.669194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:08.669311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:08.669416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:08.670055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:08.670109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:08.670365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:08.670373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:08.670391Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:08.670398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:08.670403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:08.670440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.672201Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:08.699148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:08.699235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.699302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:08.699311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:08.699389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:08.699406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:08.700296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:08.700339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:08.700396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.700406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:08.700412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 
2025-11-03T14:04:08.700417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:08.700905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.700920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:08.700930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:08.701370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.701379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.701385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:08.701391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:08.702109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:08.702678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:08.702747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:08.702973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:08.702999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:08.703006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:08.703080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:08.703088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:08.703121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:08.703134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:08.703706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:08.703720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... __progress.cpp:2808: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1151:3020], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725757 2025-11-03T14:04:10.290061Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1518: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 AlterMainTable 2025-11-03T14:04:10.290086Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1519: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 AlterMainTable TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1151:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-11-03T14:04:10.290147Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:454: AlterMainTablePropose 106 AlterMainTable Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 
72075186233409549 FailOnExist: true 2025-11-03T14:04:10.291007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-11-03T14:04:10.291050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:523: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-11-03T14:04:10.291136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-11-03T14:04:10.291713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-11-03T14:04:10.291765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-11-03T14:04:10.291792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-11-03T14:04:10.291800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-11-03T14:04:10.291815Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2613: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 106, cookie: 106, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, status: StatusInvalidParameter 2025-11-03T14:04:10.291838Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2618: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1151:3020], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 
0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 106, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, status: StatusInvalidParameter 2025-11-03T14:04:10.292102Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:2587: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1151:3020], message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } } BUILDCOLUMN RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } } 2025-11-03T14:04:10.292948Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-11-03T14:04:10.292991Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 48us result status StatusSuccess 2025-11-03T14:04:10.293099Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } 
Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:08.013916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:08.013941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:08.013949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:08.013956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:08.013963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:08.013968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:08.013978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:08.013991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:08.014105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:08.014167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:08.037991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:08.038017Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:08.044908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:08.045013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:08.045062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:08.064417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:08.064529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:08.064645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:08.064863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:08.065418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:08.065466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:08.065733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:08.065769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:08.065798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:08.065809Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:08.065815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:08.065843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-11-03T14:04:08.067604Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:08.104545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:08.104624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.104682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:08.104691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:08.104758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:08.104775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:08.106989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:08.107044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:08.107112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.107125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:08.107131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:08.107137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:08.107714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.107729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:08.107735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:08.108133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.108146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:08.108152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:08.108159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:08.108902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:08.109357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:08.109410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:08.109616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:08.109646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:08.109654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:08.109724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:08.109732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:08.109764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:08.109776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:08.110205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:08.110215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
11-03T14:04:10.279731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976725762:0, at schemeshard: 72075186233409549 2025-11-03T14:04:10.279749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976725762:0 ProgressState 2025-11-03T14:04:10.279770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725762:0 progress is 1/1 2025-11-03T14:04:10.279776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725762 ready parts: 1/1 2025-11-03T14:04:10.279783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976725762:0 progress is 1/1 2025-11-03T14:04:10.279787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725762 ready parts: 1/1 2025-11-03T14:04:10.279794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725762, ready parts: 1/1, is published: true 2025-11-03T14:04:10.279810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:567:2507] message: TxId: 281474976725762 2025-11-03T14:04:10.279819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725762 ready parts: 1/1 2025-11-03T14:04:10.279825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976725762:0 2025-11-03T14:04:10.279831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976725762:0 2025-11-03T14:04:10.279847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 12 2025-11-03T14:04:10.280397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7176: Handle: TEvNotifyTxCompletionResult: txId# 281474976725762 2025-11-03T14:04:10.280413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7178: Message: TxId: 281474976725762 2025-11-03T14:04:10.280430Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2480: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725762 2025-11-03T14:04:10.280469Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:2483: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1612:3376], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725760, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 
281474976725762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725762 2025-11-03T14:04:10.280905Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1518: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2025-11-03T14:04:10.280932Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1519: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1612:3376], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725760, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-11-03T14:04:10.280946Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-11-03T14:04:10.281247Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1518: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2025-11-03T14:04:10.281263Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1519: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1612:3376], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725760, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2025-11-03T14:04:10.281267Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 
2025-11-03T14:04:10.281285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-11-03T14:04:10.281291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1636:3400] TestWaitNotification: OK eventTxId 106 2025-11-03T14:04:10.281653Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-11-03T14:04:10.281764Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:103: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2025-11-03T14:04:10.282114Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-11-03T14:04:10.282175Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 69us result status StatusSuccess 2025-11-03T14:04:10.282318Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 
Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |56.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2025-11-03T14:04:07.473566Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499659765055791:2080];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:07.473772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:07.477629Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:07.478011Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499661311148246:2082];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:07.478215Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025e9/r3tmp/tmpGgxCHu/pdisk_1.dat 2025-11-03T14:04:07.480282Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:07.524609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:07.534798Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 
2025-11-03T14:04:07.541918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 23094, node 1 2025-11-03T14:04:07.552985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025e9/r3tmp/yandex3X7imW.tmp 2025-11-03T14:04:07.552999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025e9/r3tmp/yandex3X7imW.tmp 2025-11-03T14:04:07.553069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025e9/r3tmp/yandex3X7imW.tmp 2025-11-03T14:04:07.553139Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:07.556868Z INFO: TTestServer started on Port 21098 GrpcPort 23094 TClient is connected to server localhost:21098 PQClient connected to localhost:23094 === TenantModeEnabled() = 1 === Init PQ - start server on port 23094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:04:07.575460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.575492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:07.577223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:07.581727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.581747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:07.582863Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:07.583120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:07.589911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976720657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-11-03T14:04:07.589982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.590046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-11-03T14:04:07.590057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976720657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2025-11-03T14:04:07.590115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976720657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-11-03T14:04:07.590131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:07.590847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976720657, response: Status: StatusAccepted TxId: 281474976720657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:07.590927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976720657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-11-03T14:04:07.590984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.590999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976720657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-11-03T14:04:07.591003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976720657:0 ProgressState no shards to create, do next state 2025-11-03T14:04:07.591007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: 
Change state for txid 281474976720657:0 2 -> 3 waiting... 2025-11-03T14:04:07.591544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:30: NotifyTxCompletion operation in-flight, txId: 281474976720657, at schemeshard: 72057594046644480 2025-11-03T14:04:07.591556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 0/1, is published: true 2025-11-03T14:04:07.591560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:131: NotifyTxCompletion transaction is registered, txId: 281474976720657, at schemeshard: 72057594046644480 2025-11-03T14:04:07.591627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.591639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976720657:0 ProgressState, at schemeshard: 72057594046644480 2025-11-03T14:04:07.591642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 3 -> 128 2025-11-03T14:04:07.592114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.592128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976720657:0, at schemeshard: 72057594046644480 2025-11-03T14:04:07.592134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976720657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.592140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976720657 ready parts: 1/1 2025-11-03T14:04:07.592946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976720657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:07.593420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976720657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976720657 msg type: 269090816 2025-11-03T14:04:07.593477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976720657, partId: 4294967295, tablet: 72057594046316545 2025-11-03T14:04:07.594175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1762178647642, transactions count in step: 1, at schemeshard: 72057594046644480 2025-11-03T14:04:07.594223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1762178647642 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-11-03T14:04:07.594238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, 
at tablet# 72057594046644480 2025-11-03T14:04:07.594307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 281474976720657:0 128 -> 240 2025-11-03T14:04:07.594322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976720657:0, at tablet# 72057594046644480 2025-11-03T14:04:07.594367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-11-03T14:0 ... 8993Z :INFO: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] Starting read session 2025-11-03T14:04:09.888999Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] Starting session to cluster null (localhost:14223) 2025-11-03T14:04:09.889020Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:04:09.889023Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:04:09.889025Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] Reconnecting session to cluster null in 0.000000s 2025-11-03T14:04:09.889206Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] Successfully connected. Initializing session 2025-11-03T14:04:09.889377Z node 3 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2025-11-03T14:04:09.889389Z node 3 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created cookie 2 2025-11-03T14:04:09.889504Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-11-03T14:04:09.889539Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:929: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 read init: from# ipv6:[::1]:53056, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-11-03T14:04:09.889581Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 auth for : consumer_aba 2025-11-03T14:04:09.889804Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 Handle describe topics response 2025-11-03T14:04:09.889838Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 auth is DEAD 2025-11-03T14:04:09.889843Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1046: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 auth ok: topics# 1, initDone# 0 2025-11-03T14:04:09.890089Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1217: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 register session: topic# /Root/account1/write_topic 2025-11-03T14:04:09.890193Z :INFO: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] Server session id: consumer_aba_3_2_18436544700621337318_v1 
2025-11-03T14:04:09.890328Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-11-03T14:04:09.890215Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1655: [72075186224037894][write_topic] pipe [3:7568499668772857193:2378] connected; active server actors: 1 2025-11-03T14:04:09.890230Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1706: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7568499668772857193:2378] session consumer_aba_3_2_18436544700621337318_v1 2025-11-03T14:04:09.890245Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-11-03T14:04:09.890257Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-11-03T14:04:09.890265Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_18436544700621337318_v1" (Sender=[3:7568499668772857190:2378], Pipe=[3:7568499668772857193:2378], Partitions=[], ActiveFamilyCount=0) 2025-11-03T14:04:09.890268Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1186: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-11-03T14:04:09.890280Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1258: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-11-03T14:04:09.890290Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1305: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_18436544700621337318_v1" (Sender=[3:7568499668772857190:2378], Pipe=[3:7568499668772857193:2378], Partitions=[], ActiveFamilyCount=0) 2025-11-03T14:04:09.890301Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_18436544700621337318_v1" sender [3:7568499668772857190:2378] lock partition 0 for ReadingSession "consumer_aba_3_2_18436544700621337318_v1" (Sender=[3:7568499668772857190:2378], Pipe=[3:7568499668772857193:2378], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-11-03T14:04:09.890320Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1325: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-11-03T14:04:09.890324Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1402: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000041s 2025-11-03T14:04:09.890438Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1335: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_18436544700621337318_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7568499668772857193 RawX2: 4503612512274762 } Path: "/Root/account1/write_topic" } 2025-11-03T14:04:09.890460Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1143: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-11-03T14:04:09.890513Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 grpc read done: success# 1, data# { read { } } 2025-11-03T14:04:09.890561Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1839: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 got read request: guid# f51cb9ce-3710bd94-e37c231f-f859c3f4 2025-11-03T14:04:09.891976Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:983: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7568499668772857196:2381] 2025-11-03T14:04:09.892160Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: consumer_aba_3_2_18436544700621337318_v1:1 with generation 1 2025-11-03T14:04:09.895317Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:663: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1762178649785 CreateTimestampMS: 1762178649784 SizeLag: 165 WriteTimestampEstimateMS: 1762178649785 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2025-11-03T14:04:09.895343Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:694: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-11-03T14:04:09.895373Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1434: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-11-03T14:04:09.896082Z :INFO: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] Closing read session. 
Close timeout: 0.000000s 2025-11-03T14:04:09.896099Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-11-03T14:04:09.896114Z :INFO: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] Counters: { Errors: 0 CurrentSessionLifetimeMs: 7 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:04:09.896138Z :NOTICE: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-11-03T14:04:09.896146Z :DEBUG: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] [null] Abort session to cluster 2025-11-03T14:04:09.896280Z :NOTICE: [/Root] [/Root] [1cec15fc-d5d45917-5c083ab9-ec897e9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-11-03T14:04:09.896526Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 grpc read done: success# 0, data# { } 2025-11-03T14:04:09.896615Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 grpc read failed 2025-11-03T14:04:09.896626Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 grpc closed 2025-11-03T14:04:09.896742Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer consumer_aba session consumer_aba_3_2_18436544700621337318_v1 is DEAD 2025-11-03T14:04:09.897004Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer_aba_3_2_18436544700621337318_v1 2025-11-03T14:04:09.897342Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037894][write_topic] pipe [3:7568499668772857193:2378] disconnected. 2025-11-03T14:04:09.897352Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037894][write_topic] pipe [3:7568499668772857193:2378] disconnected; active server actors: 1 2025-11-03T14:04:09.897359Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037894][write_topic] pipe [3:7568499668772857193:2378] client consumer_aba disconnected session consumer_aba_3_2_18436544700621337318_v1 2025-11-03T14:04:10.076877Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:10.078398Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-11-03T14:04:02.380843Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.380876Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.384484Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.384563Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.416296Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.417640Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2162], cookie=1019674851108757683, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-11-03T14:04:02.417728Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:02.428663Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2162], cookie=1019674851108757683) 2025-11-03T14:04:02.428851Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:148:2170], cookie=14317982105996231582, path="/Root/Res", config={ }) 
2025-11-03T14:04:02.428907Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-11-03T14:04:02.439804Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:148:2170], cookie=14317982105996231582) 2025-11-03T14:04:02.440184Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:153:2175]. Cookie: 1587439418332126471. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:02.440198Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:153:2175], cookie=1587439418332126471) 2025-11-03T14:04:02.440256Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:153:2175]. Cookie: 2796308818757725367. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-11-03T14:04:02.440261Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:153:2175], cookie=2796308818757725367) 2025-11-03T14:04:04.249481Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:04.249512Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:04.253048Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:04.253220Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:04.274590Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:04.274730Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:138:2162], cookie=5614772056222181410, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-11-03T14:04:04.274793Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:04.285430Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:138:2162], cookie=5614772056222181410) 2025-11-03T14:04:04.285614Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:148:2170]. Cookie: 4673843513170740494. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:04.285621Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:148:2170], cookie=4673843513170740494) 2025-11-03T14:04:04.285686Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:148:2170]. Cookie: 6295514014237948462. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:04.285690Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:148:2170], cookie=6295514014237948462) 2025-11-03T14:04:04.285738Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:148:2170]. Cookie: 8498543127536007180. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-11-03T14:04:04.285742Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:148:2170], cookie=8498543127536007180) 2025-11-03T14:04:04.285776Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:148:2170]. Cookie: 16487123978152543839. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-11-03T14:04:04.285779Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:148:2170], cookie=16487123978152543839) 2025-11-03T14:04:06.172317Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:06.172350Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:06.176250Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:06.176328Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:06.199076Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:06.199259Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:138:2162], cookie=18268705171572010480, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-11-03T14:04:06.199360Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:06.211046Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:138:2162], cookie=18268705171572010480) 2025-11-03T14:04:06.211268Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:148:2170], cookie=8398630997326528243, path="/Root/Res1", config={ }) 2025-11-03T14:04:06.211355Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-11-03T14:04:06.222444Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:148:2170], cookie=8398630997326528243) 2025-11-03T14:04:06.222662Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:153:2175], cookie=13912794046457431492, path="/Root/Res2", config={ }) 2025-11-03T14:04:06.222735Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-11-03T14:04:06.233749Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:153:2175], cookie=13912794046457431492) 2025-11-03T14:04:06.234042Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:158:2180]. Cookie: 7413403913432240263. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:06.234055Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:158:2180], cookie=7413403913432240263) 2025-11-03T14:04:06.234155Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:158:2180]. 
Cookie: 7018295989419597308. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:06.234161Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:158:2180], cookie=7018295989419597308) 2025-11-03T14:04:06.234242Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:158:2180]. Cookie: 7745052327008530073. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-11-03T14:04:06.234253Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:158:2180], cookie=7745052327008530073) 2025-11-03T14:04:08.258588Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:08.258619Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:08.262423Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:08.262469Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:08.284557Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:08.284735Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2162], cookie=13677943802294675447, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-11-03T14:04:08.284823Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:08.309751Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2162], cookie=13677943802294675447) 2025-11-03T14:04:08.310087Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:148:2170]. Cookie: 8330035546268236255. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:08.310100Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:148:2170], cookie=8330035546268236255) 2025-11-03T14:04:08.310192Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:148:2170]. Cookie: 16742377642782400375. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2025-11-03T14:04:08.310199Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:148:2170], cookie=16742377642782400375) 2025-11-03T14:04:10.391780Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:10.391877Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:10.396516Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:10.396613Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:10.428414Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:10.428583Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=12958540582432534241, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-11-03T14:04:10.428654Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:10.439648Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=12958540582432534241) 2025-11-03T14:04:10.439860Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:147:2169], cookie=11773007989315617442, path="/Root/Res", config={ }) 2025-11-03T14:04:10.439932Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-11-03T14:04:10.450912Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:147:2169], cookie=11773007989315617442) 2025-11-03T14:04:10.451154Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:152:2174]. Cookie: 3808409418332408030. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:10.451169Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:152:2174], cookie=3808409418332408030) 2025-11-03T14:04:10.451267Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:156:2178], cookie=13732200934325354986, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-11-03T14:04:10.451317Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2025-11-03T14:04:10.451346Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:152:2174]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:10.462224Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:156:2178], cookie=13732200934325354986) 2025-11-03T14:04:10.462397Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:152:2174]. Cookie: 16179402257829582959. Data: { } 2025-11-03T14:04:10.462405Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:152:2174], cookie=16179402257829582959) |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-11-03T14:03:35.083501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:35.120818Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2405], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.120870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.120889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00210b/r3tmp/tmp4PKEyn/pdisk_1.dat 2025-11-03T14:03:35.214957Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:35.247684Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:35.247727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:35.269250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11621, node 1 2025-11-03T14:03:35.335245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:35.335268Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:35.335271Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:35.335408Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:03:35.338522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:35.374408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:3987 2025-11-03T14:03:35.709433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:36.525299Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:36.527787Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 2025-11-03T14:03:36.528941Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:36.540566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:36.540615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:36.579572Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:03:36.580198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:36.655560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:36.655605Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:36.655984Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656166Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656315Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656515Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656573Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656616Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656680Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656733Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.656755Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:36.668980Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:36.737161Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:36.747009Z node 2 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-11-03T14:03:36.747040Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-11-03T14:03:36.755166Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-11-03T14:03:36.755335Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-11-03T14:03:36.755364Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-11-03T14:03:36.755372Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-11-03T14:03:36.755379Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-11-03T14:03:36.755387Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-11-03T14:03:36.755394Z node 2 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 
2025-11-03T14:03:36.755401Z node 2 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-11-03T14:03:36.755550Z node 2 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-11-03T14:03:36.758213Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8310: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:36.758238Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8340: ConnectToSA(), pipe client id: [2:1827:2584], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:36.760022Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1850:2598] 2025-11-03T14:03:36.760071Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1850:2598], schemeshard id = 72075186224037897 2025-11-03T14:03:36.773220Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [2:1879:2606] 2025-11-03T14:03:36.774013Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-11-03T14:03:36.777416Z node 2 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-11-03T14:03:36.777432Z node 2 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-11-03T14:03:36.777443Z node 2 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-11-03T14:03:36.778892Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [2:1963:2646], domain# [OwnerId: 72057594046644480, LocalPathId: 2], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-11-03T14:03:36.779593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:36.781211Z node 2 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-11-03T14:03:36.781238Z node 2 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-11-03T14:03:36.867641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:36.920499Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-11-03T14:03:36.953437Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2025-11-03T14:03:37.012042Z node 2 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 
2025-11-03T14:03:37.140590Z node 2 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-11-03T14:03:37.853348Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:37.876601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:38.384361Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:38.395701Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8254: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 720751862240 ... node_id=2&id=ZGM2MGJjZGMtYzhjZTRlYWItZmYwNWU0OTItYzY1NjVlOWY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:05.779712Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4141:3551]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:05.779769Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-11-03T14:04:05.779801Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:177: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-11-03T14:04:05.779805Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-11-03T14:04:05.779831Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-11-03T14:04:05.779842Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 29, ReplyToActorId = [2:4141:3551], StatRequests.size() = 1 2025-11-03T14:04:05.779851Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-11-03T14:04:05.800503Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4131:3541], ActorId: [2:4132:3542], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGM2MGJjZGMtYzhjZTRlYWItZmYwNWU0OTItYzY1NjVlOWY=, TxId: 2025-11-03T14:04:05.800531Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4131:3541], ActorId: [2:4132:3542], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGM2MGJjZGMtYzhjZTRlYWItZmYwNWU0OTItYzY1NjVlOWY=, TxId: 2025-11-03T14:04:05.800749Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4130:3540], ActorId: [2:4131:3541], Got response [2:4132:3542] SUCCESS 2025-11-03T14:04:05.800829Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:05.801184Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4161:3562]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:05.801248Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-11-03T14:04:05.801258Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 30, ReplyToActorId = [2:4161:3562], StatRequests.size() = 1 2025-11-03T14:04:05.831261Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-11-03T14:04:05.831291Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-11-03T14:04:05.918527Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-11-03T14:04:05.918560Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-11-03T14:04:06.002537Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:3168:3130], schemeshard count = 1 2025-11-03T14:04:06.419196Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-11-03T14:04:06.419228Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.578000s, at schemeshard: 72075186224037899 2025-11-03T14:04:06.419299Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 50, entries count: 2, are all stats full: 1 2025-11-03T14:04:06.430862Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-11-03T14:04:07.031780Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4206:3589]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:07.031898Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-11-03T14:04:07.031908Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 31, ReplyToActorId = [2:4206:3589], StatRequests.size() = 1 2025-11-03T14:04:08.188265Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-11-03T14:04:08.198981Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:08.199011Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-11-03T14:04:08.199022Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-11-03T14:04:08.199027Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-11-03T14:04:08.199121Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4237:3603], ActorId: [2:4238:3604], Starting query actor #1 [2:4239:3605] 2025-11-03T14:04:08.199134Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4238:3604], ActorId: [2:4239:3605], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2025-11-03T14:04:08.199882Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4238:3604], ActorId: [2:4239:3605], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OGYyZDI3ZWQtNDQ2NGU2ZjMtMTUwMTM4ZTUtMjhkM2Q3NWI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:08.202680Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4238:3604], ActorId: [2:4239:3605], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGYyZDI3ZWQtNDQ2NGU2ZjMtMTUwMTM4ZTUtMjhkM2Q3NWI=, TxId: 2025-11-03T14:04:08.202704Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4238:3604], ActorId: [2:4239:3605], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGYyZDI3ZWQtNDQ2NGU2ZjMtMTUwMTM4ZTUtMjhkM2Q3NWI=, TxId: 2025-11-03T14:04:08.202999Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4237:3603], ActorId: [2:4238:3604], Got response [2:4239:3605] SUCCESS 2025-11-03T14:04:08.203091Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:08.203388Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4260:3617]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:08.203453Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-11-03T14:04:08.203461Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 32, ReplyToActorId = [2:4260:3617], StatRequests.size() = 1 2025-11-03T14:04:08.225767Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-11-03T14:04:08.225793Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-11-03T14:04:09.284608Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4299:3635]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:09.284725Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-11-03T14:04:09.284737Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 33, ReplyToActorId = [2:4299:3635], StatRequests.size() = 1 2025-11-03T14:04:10.324059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-11-03T14:04:10.324213Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 7 2025-11-03T14:04:10.324306Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 0 2025-11-03T14:04:10.324325Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-11-03T14:04:10.334911Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:10.334937Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-11-03T14:04:10.334947Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-11-03T14:04:10.334953Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-11-03T14:04:10.335052Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:4332:3651], ActorId: [2:4333:3652], Starting query actor #1 [2:4334:3653] 2025-11-03T14:04:10.335068Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:4333:3652], ActorId: [2:4334:3653], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2025-11-03T14:04:10.335996Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:4333:3652], ActorId: [2:4334:3653], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZjVkOGY1YmQtZGM5YTI0MmYtOTUxNTMxMy00OTA4OGEz, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:10.340302Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:4333:3652], ActorId: [2:4334:3653], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjVkOGY1YmQtZGM5YTI0MmYtOTUxNTMxMy00OTA4OGEz, TxId: 2025-11-03T14:04:10.340321Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:4333:3652], ActorId: [2:4334:3653], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjVkOGY1YmQtZGM5YTI0MmYtOTUxNTMxMy00OTA4OGEz, TxId: 2025-11-03T14:04:10.340386Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:4332:3651], ActorId: [2:4333:3652], Got response [2:4334:3653] SUCCESS 2025-11-03T14:04:10.340440Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:10.340715Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4355:3665]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:10.340789Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-11-03T14:04:10.340796Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 34, ReplyToActorId = [2:4355:3665], StatRequests.size() = 1 >> TIncrementalRestoreTests::IncrementalRestorePersistenceRowsLifecycle >> TIncrementalRestoreTests::CopyTableChangeStateSupport >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> test.py::test[aggregate-group_by_session--Results] [GOOD] >> test.py::test[aggregate-group_by_session_distinct_compact--Results] >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> KqpResultSetFormats::ArrowFormat_Simple >> TIncrementalRestoreTests::CopyTableChangeStateSupport [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TSchemeShardTest::SchemeErrors [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> TSchemeShardTest::SerializedCellVec [GOOD] >> 
TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> KqpResultSetFormats::DefaultFormat >> TSchemeShardTest::AlterTableFollowers >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::CopyTableChangeStateSupport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:11.529571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:11.529593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:11.529605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:11.529661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:11.529669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:11.529674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:11.529685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:11.529699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:11.529804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:11.529868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:11.547493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:11.547515Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:11.551061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:11.551140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:11.551168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
2025-11-03T14:04:11.553022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:11.553116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:11.553216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.553416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:11.553966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:11.554016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:11.554232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:11.554245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:11.554273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:11.554282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:11.554288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:11.554313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.555679Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:11.579799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:11.579868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.579925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:11.579933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:11.580009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:11.580023Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:11.580725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.580774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:11.580828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.580838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:11.580844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:11.580849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:11.581403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.581422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:11.581430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:11.581983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.581997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.582004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:11.582011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:11.582662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:11.583176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:11.583244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at 
step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:11.583454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.583481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:11.583488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:11.583570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:11.583578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:11.583606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:11.583618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:11.584693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:11.584704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
::TEvSchemaChanged> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5906: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 4294969617 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-11-03T14:04:11.813338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2025-11-03T14:04:11.813358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 335 RawX2: 4294969617 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-11-03T14:04:11.813365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1052: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2025-11-03T14:04:11.813390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1056: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 335 RawX2: 4294969617 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-11-03T14:04:11.813401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 129 -> 240 2025-11-03T14:04:11.813797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.813874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 103:0ProgressState, operation type TxCopyTable 2025-11-03T14:04:11.813880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1079: Set barrier, OperationId: 103:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-11-03T14:04:11.813884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1123: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-11-03T14:04:11.813892Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-11-03T14:04:11.813896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 103:0 240 -> 240 2025-11-03T14:04:11.814178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.814187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2025-11-03T14:04:11.814197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-11-03T14:04:11.814201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-11-03T14:04:11.814204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#103:0 progress is 1/1 2025-11-03T14:04:11.814207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-11-03T14:04:11.814213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-11-03T14:04:11.814228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:363:2341] message: TxId: 103 2025-11-03T14:04:11.814236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-11-03T14:04:11.814243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 103:0 2025-11-03T14:04:11.814247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 103:0 2025-11-03T14:04:11.814276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-11-03T14:04:11.814280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-11-03T14:04:11.814562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-11-03T14:04:11.814571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:471:2430] TestWaitNotification: OK eventTxId 103 2025-11-03T14:04:11.814682Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/dst1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:11.814750Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/dst1" took 77us result status StatusSuccess 2025-11-03T14:04:11.815089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/dst1" PathDescription { Self { Name: "dst1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateIncomingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpResultSetFormats::ArrowFormat_EmptyBatch >> KqpResultSetFormats::ValueFormat_Simple >> TIncrementalRestoreTests::MultipleCollectionsGenerateMultipleTEvRunIncrementalRestoreEvents [GOOD] >> TIncrementalRestoreTests::IncrementalRestorePersistenceRowsLifecycle [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> test.py::test[aggregate-group_by_expr_with_join--Results] [GOOD] >> test.py::test[aggregate-group_by_full_path-default.txt-Results] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::MultipleCollectionsGenerateMultipleTEvRunIncrementalRestoreEvents [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:10.804337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:10.804363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.804374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:10.804382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:10.804390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-11-03T14:04:10.804395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:10.804404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.804419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:10.804546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:10.804603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:10.821221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:10.821250Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:10.825164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:10.825241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:10.825274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:10.827216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:10.827301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:10.827413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.827637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:10.828145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.828195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:10.828437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.828445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.828473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:10.828481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:10.828487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:10.828514Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.829972Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:10.851317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:10.851412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.851482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:10.851491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:10.851576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:10.851591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:10.852505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.852563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:10.852625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.852635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:10.852642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:10.852648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:10.853016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.853024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:10.853032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for 
txid 1:0 3 -> 128 2025-11-03T14:04:10.853339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.853346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.853353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.853360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:10.854029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:10.854399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:10.854448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:10.854677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.854700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:10.854707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.854774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:10.854780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.854833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:10.854845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:10.855221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.855228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_ ... 25-11-03T14:04:12.216461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_restore_backup_collection.cpp:62: [72057594046678944] TDoneWithIncrementalRestore, operationId: 126:2HandleReply TEvCompleteBarrier 2025-11-03T14:04:12.216470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_restore_backup_collection.cpp:98: [72057594046678944] TDoneWithIncrementalRestore, operationId: 126:2 Found 1 incremental backups to restore 2025-11-03T14:04:12.216478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#126:2 progress is 3/3 2025-11-03T14:04:12.216482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 126 ready parts: 3/3 2025-11-03T14:04:12.216486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#126:0 progress is 3/3 2025-11-03T14:04:12.216490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 126 ready parts: 3/3 2025-11-03T14:04:12.216495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 126, ready parts: 2/3, is published: true 2025-11-03T14:04:12.216499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 126, ready parts: 3/3, is published: true 2025-11-03T14:04:12.216516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:308:2298] message: TxId: 126 2025-11-03T14:04:12.216523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 126 ready parts: 3/3 2025-11-03T14:04:12.216530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 126:0 2025-11-03T14:04:12.216535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 126:0 2025-11-03T14:04:12.216569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 3 2025-11-03T14:04:12.216573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 3 2025-11-03T14:04:12.216579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 126:1 2025-11-03T14:04:12.216582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 126:1 2025-11-03T14:04:12.216586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 2 2025-11-03T14:04:12.216589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 126:2 2025-11-03T14:04:12.216591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 126:2 2025-11-03T14:04:12.216595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 3 2025-11-03T14:04:12.217236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-11-03T14:04:12.217249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:2022:3828] 2025-11-03T14:04:12.217293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:366: [IncrementalRestore] Handle(TEvRunIncrementalRestore) starting sequential processing for 1 incremental backups backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 20] operationId: 126:2 tablet: 72057594046678944 2025-11-03T14:04:12.217303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:391: [IncrementalRestore] Handle(TEvRunIncrementalRestore) added incremental backup: 'backup_002' 2025-11-03T14:04:12.217307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:394: [IncrementalRestore] Handle(TEvRunIncrementalRestore) state now has 1 incremental backups 2025-11-03T14:04:12.217318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 126 tablet: 72057594046678944 2025-11-03T14:04:12.217335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=1 2025-11-03T14:04:12.217339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:106: [IncrementalRestore] No operations in progress, starting first incremental backup 2025-11-03T14:04:12.217344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:193: [IncrementalRestore] Processing incremental backup #1 path: backup_002 timestamp: 0 2025-11-03T14:04:12.217349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:520: [IncrementalRestore] CreateIncrementalRestoreOperation for backup: backup_002 operationId: 126 backupCollectionPathId: [OwnerId: 72057594046678944, LocalPathId: 20] 2025-11-03T14:04:12.217383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:546: [IncrementalRestore] Creating separate restore operation for table: /MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3 -> /MyRoot/Table3 2025-11-03T14:04:12.217402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:584: [IncrementalRestore] Table operation 281474976710662:0 expects 1 shards 2025-11-03T14:04:12.217406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:588: [IncrementalRestore] Tracking operation 281474976710662:0 for incremental restore 126 2025-11-03T14:04:12.217410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:591: [IncrementalRestore] Sending MultiIncrementalRestore operation for table: /MyRoot/Table3 2025-11-03T14:04:12.217419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:598: [IncrementalRestore] Created separate restore operations for incremental backup: backup_002 2025-11-03T14:04:12.218144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.backups/collections/Collection3" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3" DstTablePath: 
"/MyRoot/Table3" } } TxId: 281474976710662 , at schemeshard: 72057594046678944 2025-11-03T14:04:12.218172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:520: [72057594046678944] CreateRestoreMultipleIncrementalBackups: opId# 281474976710662:0, tx# WorkingDir: "/MyRoot/.backups/collections/Collection3" OperationType: ESchemeOpRestoreMultipleIncrementalBackups Internal: true RestoreMultipleIncrementalBackups { SrcTablePaths: "/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3" DstTablePath: "/MyRoot/Table3" } 2025-11-03T14:04:12.218206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_restore_incremental_backup.cpp:362: [72057594046678944] TNewRestoreFromAtTable Propose: opId# 281474976710662:0, srcs# [/MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3], dst# /MyRoot/Table3 2025-11-03T14:04:12.218228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 1 2025-11-03T14:04:12.218232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 2 2025-11-03T14:04:12.218238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976710662:0 type: TxRestoreIncrementalBackupAtTable target path: [OwnerId: 72057594046678944, LocalPathId: 24] source path: [OwnerId: 72057594046678944, LocalPathId: 27] 2025-11-03T14:04:12.218244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_restore_incremental_backup.cpp:476: TNewRestoreFromAtTable Propose opId# 281474976710662:0 workingDir# /MyRoot/.backups/collections/Collection3 dstTablePath# /MyRoot/Table3 pathId# [OwnerId: 72057594046678944, LocalPathId: 27] 2025-11-03T14:04:12.218255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710662:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:12.218936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 126 2025-11-03T14:04:12.219035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710662, response: Status: StatusAccepted TxId: 281474976710662 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:12.219086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710662, database: /MyRoot, subject: , status: StatusAccepted, operation: RESTORE, dst path: /MyRoot/.backups/collections/Collection3/backup_002_incremental/Table3, dst path: /MyRoot/Table3 2025-11-03T14:04:12.219121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976710662, status# StatusAccepted 2025-11-03T14:04:12.219129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusAccepted TxId: 281474976710662 SchemeshardId: 72057594046678944 2025-11-03T14:04:12.219139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 126 tablet: 72057594046678944 2025-11-03T14:04:12.219158Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=1, CompletedOperations.size()=0, CurrentIncrementalIdx=0, IncrementalBackups.size()=1 2025-11-03T14:04:12.219201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710662:0, at schemeshard: 72057594046678944 2025-11-03T14:04:12.219208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_restore_incremental_backup.cpp:85: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710662:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:12.219660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 126 2025-11-03T14:04:12.219854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 281474976710662:0 from tablet: 72057594046678944 to tablet: 72075186233409554 cookie: 72057594046678944:9 msg type: 269549568 2025-11-03T14:04:12.219887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710662, partId: 0, tablet: 72075186233409554 TestWaitNotification: OK eventTxId 126 Successfully verified 3 TEvRunIncrementalRestore events for 3 unique collections with 3 operations in database ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TIncrementalRestoreTests::IncrementalRestorePersistenceRowsLifecycle [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:11.478103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:11.478122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:11.478129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:11.478134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:11.478140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:11.478143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:11.478150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:11.478159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:11.478235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:11.478276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:11.495126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:11.495152Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:11.498832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:11.498910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:11.498939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:11.500780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:11.500869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:11.500975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.501161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:11.501682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:11.501734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:11.501966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:11.501976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:11.502004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:11.502015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:11.502023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:11.502050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.503463Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:11.522391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:11.522450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.522500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:11.522506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:11.522556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:11.522566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:11.523346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.523390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:11.523448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.523456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:11.523460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:11.523464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:11.523803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.523812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:11.523816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:11.524568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.524575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:11.524579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-11-03T14:04:11.524585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:11.525032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:11.525356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:11.525395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:11.525535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:11.525559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:11.525565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:11.525610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:11.525617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:11.525644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:11.525655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:11.526067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:11.526074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
:12.058271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 1/1, is published: false 2025-11-03T14:04:12.058277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710658 ready parts: 1/1 2025-11-03T14:04:12.058283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710658:0 2025-11-03T14:04:12.058287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710658:0 2025-11-03T14:04:12.058311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-11-03T14:04:12.058316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-11-03T14:04:12.058323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:608: [IncrementalRestore] Operation 281474976710658:0 completed, triggering progress check for incremental restore 110 2025-11-03T14:04:12.058330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 281474976710658, publications: 1, subscribers: 0 2025-11-03T14:04:12.058334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 281474976710658, [OwnerId: 72057594046678944, LocalPathId: 10], 3 2025-11-03T14:04:12.058410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:408: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 110 tablet: 72057594046678944 2025-11-03T14:04:12.058418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 110 tablet: 72057594046678944 2025-11-03T14:04:12.058431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:171: [IncrementalRestore] Operation 281474976710658:0 completed for incremental restore 110 2025-11-03T14:04:12.058440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:66: [IncrementalRestore] Persisted CompletedOperations update: @ 2025-11-03T14:04:12.058446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:73: [IncrementalRestore] Checking completion: InProgressOperations.size()=0, CompletedOperations.size()=1, CurrentIncrementalIdx=1, IncrementalBackups.size()=2 2025-11-03T14:04:12.058450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:76: [IncrementalRestore] All operations for current incremental backup completed, moving to next 2025-11-03T14:04:12.058457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:86: [IncrementalRestore] After MoveToNextIncremental: CurrentIncrementalIdx=2, IncrementalBackups.size()=2 2025-11-03T14:04:12.058461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:89: [IncrementalRestore] All incremental backups processed, performing finalization 2025-11-03T14:04:12.058468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:210: [IncrementalRestore] Starting finalization of incremental restore operation: 110 2025-11-03T14:04:12.058481Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:234: [IncrementalRestore] Sending finalization operation with txId: 281474976710659 2025-11-03T14:04:12.059297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpIncrementalRestoreFinalize Internal: true IncrementalRestoreFinalize { OriginalOperationId: 110 BackupCollectionPathId: 4 TargetTablePaths: "/MyRoot/PersistTable" } } TxId: 281474976710659 , at schemeshard: 72057594046678944 2025-11-03T14:04:12.059334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:209: [72057594046678944] TIncrementalRestoreFinalizeOp Propose, opId: 281474976710659:0 2025-11-03T14:04:12.059344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-11-03T14:04:12.059351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 281474976710659:0 type: TxIncrementalRestoreFinalize target path: [OwnerId: 72057594046678944, LocalPathId: 4] source path: 2025-11-03T14:04:12.059361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:12.060183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:12.060199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710658, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-11-03T14:04:12.060827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:115: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 110 2025-11-03T14:04:12.060865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710659, response: Status: StatusAccepted TxId: 281474976710659 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:12.060899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, subject: , status: StatusAccepted, operation: RESTORE INCREMENTAL FINALIZE, no path 2025-11-03T14:04:12.060917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:12.060923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:212:2213], at schemeshard: 72057594046678944, txId: 281474976710658, path id: 10 2025-11-03T14:04:12.061043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7125: Handle: TEvModifySchemeTransactionResult: txId# 281474976710659, status# StatusAccepted 2025-11-03T14:04:12.061054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7127: Message: Status: StatusAccepted TxId: 281474976710659 SchemeshardId: 72057594046678944 2025-11-03T14:04:12.061061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7146: no able to determine destination for message TEvModifySchemeTransactionResult: txId: 281474976710659, at schemeshard: 72057594046678944 2025-11-03T14:04:12.061083Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710659:0, at schemeshard: 72057594046678944 2025-11-03T14:04:12.061095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:56: [72057594046678944] TIncrementalRestoreFinalize TPropose operationId: 281474976710659:0 ProgressState 2025-11-03T14:04:12.061109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:111: [72057594046678944] Adding target table path to normalize: /MyRoot/PersistTable 2025-11-03T14:04:12.061116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:144: [72057594046678944] Marked incremental restore state as completed for operation: 110 2025-11-03T14:04:12.061120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:147: [72057594046678944] Keeping IncrementalRestoreOperations entry for operation: 110 - will be cleaned up on FORGET 2025-11-03T14:04:12.061125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:150: [72057594046678944] Cleaned up long incremental restore ops for operation: 110 2025-11-03T14:04:12.061131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:176: [72057594046678944] Cleaned up mappings for operation: 110 2025-11-03T14:04:12.061136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_incremental_restore_finalize.cpp:82: [72057594046678944] Cleaning up 1 shard progress entries for operation 110 2025-11-03T14:04:12.061158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710659:0 progress is 1/1 2025-11-03T14:04:12.061163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-11-03T14:04:12.061169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#281474976710659:0 progress is 1/1 2025-11-03T14:04:12.061172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-11-03T14:04:12.061178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 1/1, is published: true 2025-11-03T14:04:12.061184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-11-03T14:04:12.061189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 281474976710659:0 2025-11-03T14:04:12.061192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 281474976710659:0 2025-11-03T14:04:12.061202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-11-03T14:04:12.061282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710658 2025-11-03T14:04:12.061294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710658 2025-11-03T14:04:12.061300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710658 2025-11-03T14:04:12.061305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710658, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 3 2025-11-03T14:04:12.061312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-11-03T14:04:12.061330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710658, subscribers: 0 2025-11-03T14:04:12.062171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710659:0 2025-11-03T14:04:12.062223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710658 >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> test.py::test[join-join_comp_common_table--Results] [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::SingleKey >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> test.py::test[join-join_no_correlation_in_order_by-off-Results] [SKIPPED] >> test.py::test[join-left_only_semi_and_other--Results] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC |56.5%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_incremental_restore/unittest >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic >> DataShardTxOrder::RandomPointsAndRanges >> 
TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:10.805614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:10.805637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.805643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:10.805651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:10.805659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:10.805664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:10.805673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.805687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:10.805769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:10.805816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:10.818226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:10.818245Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:10.821566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:10.821631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:10.821655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:10.823989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:10.824117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:10.824260Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.824571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:10.825177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.825236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:10.825507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.825519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.825564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:10.825575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:10.825582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:10.825610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.827019Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:10.846283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:10.846373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.846436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:10.846446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:10.846527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:10.846544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
/home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:10.849625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.849683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:10.849732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.849742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:10.849746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:10.849751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:10.850233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.850243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:10.850250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:10.850683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.850698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.850703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.850709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:10.851320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:10.851736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:10.851816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:10.851987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.852009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:10.852014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.852067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:10.852072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.852096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:10.852105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:10.852523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.852533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:2 progress is 3/4 2025-11-03T14:04:13.566473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-11-03T14:04:13.566477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-11-03T14:04:13.566530Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.566535Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2025-11-03T14:04:13.566546Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:344:2321] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-11-03T14:04:13.566654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:04:13.566661Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.566693Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435072, Sender [7:129:2153], Recipient [7:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-11-03T14:04:13.566710Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5247: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-11-03T14:04:13.566716Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-11-03T14:04:13.566722Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:13.566763Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-11-03T14:04:13.566798Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-11-03T14:04:13.566803Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-11-03T14:04:13.566807Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-11-03T14:04:13.566812Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#102:0 progress is 4/4 2025-11-03T14:04:13.566816Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-11-03T14:04:13.566823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-11-03T14:04:13.566835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:416:2372] message: TxId: 102 2025-11-03T14:04:13.566841Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-11-03T14:04:13.566848Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:0 2025-11-03T14:04:13.566853Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:0 2025-11-03T14:04:13.566876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-11-03T14:04:13.566882Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:1 2025-11-03T14:04:13.566886Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:1 2025-11-03T14:04:13.566891Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-11-03T14:04:13.566895Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:2 2025-11-03T14:04:13.566899Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:2 2025-11-03T14:04:13.566905Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-11-03T14:04:13.566910Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 102:3 2025-11-03T14:04:13.566914Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 102:3 
2025-11-03T14:04:13.566922Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-11-03T14:04:13.567012Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:04:13.567018Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567042Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 2146435084, Sender [7:129:2153], Recipient [7:129:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-11-03T14:04:13.567047Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5419: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-11-03T14:04:13.567055Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-11-03T14:04:13.567060Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-11-03T14:04:13.567071Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-11-03T14:04:13.567095Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:04:13.567102Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567107Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:04:13.567110Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:04:13.567120Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567457Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-11-03T14:04:13.567467Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567820Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567846Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:13.567861Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:641: Send to actor: [7:416:2372] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at 
schemeshard: 72057594046678944 2025-11-03T14:04:13.567895Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-11-03T14:04:13.567906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-11-03T14:04:13.567912Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:522:2471] 2025-11-03T14:04:13.567955Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [7:524:2473], Recipient [7:129:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:04:13.567961Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:04:13.567965Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-11-03T14:04:13.568053Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [7:599:2548], Recipient [7:129:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-11-03T14:04:13.568059Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-11-03T14:04:13.568073Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:13.568114Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 38us result status StatusPathDoesNotExist 2025-11-03T14:04:13.568154Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102, source_location: ydb/core/tx/schemeshard/schemeshard_path_describer.cpp:1181" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> test.py::test[window-row_number_no_part_from_subq-default.txt-Results] [GOOD] >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> 
KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> Viewer::TabletMergingPacked >> KqpResultSetFormats::ArrowFormat_Types_Time >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-11-03T14:04:01.625865Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.625907Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.630757Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.630852Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.662666Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.664061Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:137:2162], cookie=4484399960076857752, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-11-03T14:04:01.664124Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:137:2162], cookie=4484399960076857752) 2025-11-03T14:04:01.664284Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:145:2167], cookie=13197448920652309032, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-11-03T14:04:01.664296Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:145:2167], cookie=13197448920652309032) 2025-11-03T14:04:01.664347Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:148:2170], cookie=12343042697967632912, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-11-03T14:04:01.664396Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-11-03T14:04:01.675179Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:148:2170], cookie=12343042697967632912) 2025-11-03T14:04:01.675326Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:153:2175], cookie=13229844003358827289, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-11-03T14:04:01.675386Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-11-03T14:04:01.686009Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:153:2175], cookie=13229844003358827289) 2025-11-03T14:04:01.883222Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.883249Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.886205Z node 2 
:KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.886330Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.907782Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.907955Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:138:2162], cookie=12446773053227990868, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-11-03T14:04:01.908055Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:01.918857Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:138:2162], cookie=12446773053227990868) 2025-11-03T14:04:01.919006Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:148:2170], cookie=9376000904479547220, path="/Root/Res", config={ }) 2025-11-03T14:04:01.919055Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-11-03T14:04:01.929848Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:148:2170], cookie=9376000904479547220) 2025-11-03T14:04:01.930269Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:153:2175]. Cookie: 4019943946636468416. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:01.930283Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:153:2175], cookie=4019943946636468416) 2025-11-03T14:04:01.930353Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:153:2175]. Cookie: 14336661855626875037. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-11-03T14:04:01.930358Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:153:2175], cookie=14336661855626875037) 2025-11-03T14:04:03.762530Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.762557Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.765457Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.765504Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.786439Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.786548Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:138:2162], cookie=9306474213197626147, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-11-03T14:04:03.786613Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:03.799140Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:138:2162], cookie=9306474213197626147) 2025-11-03T14:04:03.799327Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:148:2170], cookie=5699585852239064268, path="/Root/Res", config={ }) 2025-11-03T14:04:03.799384Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-11-03T14:04:03.810348Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:148:2170], cookie=5699585852239064268) 2025-11-03T14:04:03.810606Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:153:2175]. Cookie: 4578526885029646126. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:03.810622Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:153:2175], cookie=4578526885029646126) 2025-11-03T14:04:03.810733Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:153:2175]. Cookie: 5456773761439909995. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-11-03T14:04:03.810742Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:153:2175], cookie=5456773761439909995) 2025-11-03T14:04:05.657880Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:05.657911Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:05.661061Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:05.661095Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:05.682793Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:05.682985Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2162], cookie=926203043988850939, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-11-03T14:04:05.683082Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:05.704143Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2162], cookie=926203043988850939) 2025-11-03T14:04:05.704337Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:148:2170]. Cookie: 9489247618249676874. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:05.704344Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:148:2170], cookie=9489247618249676874) 2025-11-03T14:04:05.704408Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:148:2170]. Cookie: 17640600827716659131. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-11-03T14:04:05.704412Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:148:2170], cookie=17640600827716659131) 2025-11-03T14:04:05.704447Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:148:2170]. Cookie: 14445439828805723094. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-11-03T14:04:05.704450Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:148:2170], cookie=14445439828805723094) 2025-11-03T14:04:07.745901Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:07.745942Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:07.751928Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:07.752018Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:07.784929Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:07.785128Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:137:2162], cookie=8057533177195838353, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-11-03T14:04:07.785229Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:07.803396Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:137:2162], cookie=8057533177195838353) 2025-11-03T14:04:07.803776Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:148:2170]. Cookie: 1019673004536350187. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:07.803792Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:148:2170], cookie=1019673004536350187) 2025-11-03T14:04:07.803920Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:148:2170]. Cookie: 9455104829940961983. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-11-03T14:04:07.803929Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:148:2170], cookie=9455104829940961983) 2025-11-03T14:04:10.246275Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:187:2194]. Cookie: 7684390043382044516. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:10.246300Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:187:2194], cookie=7684390043382044516) 2025-11-03T14:04:10.246398Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:187:2194]. Cookie: 4296283706993780139. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-11-03T14:04:10.246406Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:187:2194], cookie=4296283706993780139) 2025-11-03T14:04:12.327765Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:220:2220]. Cookie: 13505178466906259102. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:12.327790Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:220:2220], cookie=13505178466906259102) 2025-11-03T14:04:12.327871Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:220:2220]. Cookie: 6723260707610142067. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-11-03T14:04:12.327877Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:220:2220], cookie=6723260707610142067) >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams >> Viewer::Cluster10000Tablets >> Viewer::LevenshteinDistance [GOOD] >> Viewer::JsonStorageListingV2 >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> Viewer::JsonAutocompleteEmpty >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> Viewer::PDiskMerging >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> Viewer::TabletMerging >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> Viewer::PDiskMerging [GOOD] >> Viewer::SelectStringWithBase64Encoding >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> Viewer::SelectStringWithNoBase64Encoding >> Viewer::JsonAutocompleteSimilarDatabaseName >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 >> Viewer::JsonAutocompleteStartOfDatabaseName >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> Viewer::JsonAutocompleteColumns [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TPersQueueMirrorer::ValidStartStream [GOOD] >> TPersQueueMirrorerWithScaling::TestBasicRemote ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:10.716756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:10.716779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.716784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:10.716790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:10.716796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:10.716799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:10.716806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.716822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:10.716915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:10.716968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:10.733015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:10.733041Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:10.736994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:10.737080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:10.737123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:10.739836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:10.739959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:10.740077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.740544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:10.744655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.744724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:10.745014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.745026Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.745052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:10.745061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:10.745067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:10.745096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.746953Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:10.771913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:10.772017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.772092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:10.772101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:10.772156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:10.772175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:10.773151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:10.773259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:10.773275Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:10.773281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:10.773704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:10.774116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.774127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.774133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.774148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:10.774894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:10.775296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:10.775341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:10.775568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.775593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:10.775601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.775670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:10.775677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.775722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:10.775734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:10.776142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.776150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... rtByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-11-03T14:04:16.190244Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-11-03T14:04:16.190251Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-11-03T14:04:16.190256Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.190260Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-11-03T14:04:16.190308Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 104:0 128 -> 240 2025-11-03T14:04:16.190346Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-11-03T14:04:16.192417Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192465Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192481Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192493Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192505Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192519Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192552Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-11-03T14:04:16.192560Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-11-03T14:04:16.192635Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192641Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [14:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-11-03T14:04:16.192655Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-11-03T14:04:16.192663Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2025-11-03T14:04:16.192680Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-11-03T14:04:16.192684Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-11-03T14:04:16.192689Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#104:0 progress is 1/1 2025-11-03T14:04:16.192693Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-11-03T14:04:16.192698Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-11-03T14:04:16.192704Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-11-03T14:04:16.192711Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 104:0 2025-11-03T14:04:16.192716Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 104:0 2025-11-03T14:04:16.192766Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-11-03T14:04:16.192774Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-11-03T14:04:16.192779Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-11-03T14:04:16.192985Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-11-03T14:04:16.192998Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-11-03T14:04:16.193003Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 
2025-11-03T14:04:16.193008Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-11-03T14:04:16.193013Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-11-03T14:04:16.193027Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-11-03T14:04:16.194581Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-11-03T14:04:16.200380Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-11-03T14:04:16.200400Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-11-03T14:04:16.200488Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-11-03T14:04:16.200511Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-11-03T14:04:16.200517Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [14:1460:3240] TestWaitNotification: OK eventTxId 104 2025-11-03T14:04:16.200613Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:16.200662Z node 14 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 68us result status StatusSuccess 2025-11-03T14:04:16.200857Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 
ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> TReplicationWithRebootsTests::CreateDropRecreate >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] Test command err: 2025-11-03T14:04:16.263040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:16.297853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:16.299238Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:16.299283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:16.299320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:16.375206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:16.375250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:16.379686Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:16.381103Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178655395988 != 1762178655395992 2025-11-03T14:04:16.416195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21806, node 1 TClient is connected to server localhost:27778 2025-11-03T14:04:16.492277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:16.492295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:16.492301Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:16.492405Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly |56.5%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |56.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |56.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonStorageListingV1 
|56.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/tools/fqrun/fqrun |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] |56.6%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |56.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |56.6%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |56.6%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |56.6%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |56.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteScheme >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] Test command err: 2025-11-03T14:04:15.113802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:15.164227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:15.166994Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:15.167092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:15.167135Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e2b/r3tmp/tmpCxDqHu/pdisk_1.dat 2025-11-03T14:04:15.251715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:15.251764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:15.274494Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:15.279696Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178654480352 != 1762178654480356 2025-11-03T14:04:15.315228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:15.372275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:15.427388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:15.511593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:15.532025Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:15.532295Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:15.532369Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:04:15.532435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:04:15.553434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:15.553687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:04:15.553723Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-11-03T14:04:15.553925Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:04:15.553936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:04:15.553945Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:04:15.554014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:04:15.554044Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:04:15.554059Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:04:15.566724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:04:15.572369Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:04:15.572484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:04:15.572521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:04:15.572528Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:04:15.572534Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:04:15.572541Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:15.572639Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:15.572648Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:15.572781Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:04:15.572809Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:04:15.572932Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:04:15.572945Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:04:15.572953Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:04:15.572959Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:04:15.572965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:04:15.572971Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:04:15.572977Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-11-03T14:04:15.572994Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:15.573000Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:15.573008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:04:15.573022Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:04:15.573027Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:04:15.573051Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:04:15.573111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:04:15.573124Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:04:15.573145Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:04:15.573154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-11-03T14:04:15.573160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-11-03T14:04:15.573166Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-11-03T14:04:15.573171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-11-03T14:04:15.573273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-11-03T14:04:15.573278Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-11-03T14:04:15.573282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-11-03T14:04:15.573287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-11-03T14:04:15.573300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-11-03T14:04:15.573304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-11-03T14:04:15.573311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-11-03T14:04:15.573315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-11-03T14:04:15.573321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-11-03T14:04:15.573643Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:674:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-11-03T14:04:15.573655Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:04:15.586384Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:04:15.586424Z node 1 :TX_DATASHAR ... //session/3?node_id=2&id=NDEyNzcxZTktNDE4MDgxYjUtZjIzYTEwOTEtNmMxYjQ5N2E=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:17.966533Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1145:2708] TxId: 281474976710676. Ctx: { TraceId: 01k950gvd63j5z20kkk5hetm74, Database: , SessionId: ydb://session/3?node_id=2&id=NDEyNzcxZTktNDE4MDgxYjUtZjIzYTEwOTEtNmMxYjQ5N2E=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:17.966540Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1145:2708] TxId: 281474976710676. Ctx: { TraceId: 01k950gvd63j5z20kkk5hetm74, Database: , SessionId: ydb://session/3?node_id=2&id=NDEyNzcxZTktNDE4MDgxYjUtZjIzYTEwOTEtNmMxYjQ5N2E=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-11-03T14:04:17.966546Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710677. Resolved key sets: 0 2025-11-03T14:04:17.966569Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2987: SessionId: ydb://session/3?node_id=2&id=ZGZhYmNmOGUtOWFkOWFjODYtZWE5Yjk5OGEtY2JkYjQxMw==, ActorId: [2:897:2720], ActorState: ExecuteState, TraceId: 01k950gvd71p1cyt32ve90vkec, Create QueryResponse for error on request, msg: 2025-11-03T14:04:17.966605Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2987: SessionId: ydb://session/3?node_id=2&id=ZmIyNjA2MzYtNzM5ZjgyZWEtZjcxOGU0ZjQtYmUyMmQ3ZGM=, ActorId: [2:901:2724], ActorState: ExecuteState, TraceId: 01k950gvd7dat8edrab5xprbg4, Create QueryResponse for error on request, msg: 2025-11-03T14:04:17.967477Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710677. Ctx: { TraceId: 01k950gvd6e19kkvjjpd96mmwf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRmYmQ3OGQtNWUxZGI5OTQtZTNiZDU5YWMtODliNmM0MGQ=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:04:17.967493Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710677. Ctx: { TraceId: 01k950gvd6e19kkvjjpd96mmwf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRmYmQ3OGQtNWUxZGI5OTQtZTNiZDU5YWMtODliNmM0MGQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-11-03T14:04:17.967501Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2681: ActorId: [2:1146:2711] TxId: 281474976710677. 
Ctx: { TraceId: 01k950gvd6e19kkvjjpd96mmwf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRmYmQ3OGQtNWUxZGI5OTQtZTNiZDU5YWMtODliNmM0MGQ=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:17.967514Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1146:2711] TxId: 281474976710677. Ctx: { TraceId: 01k950gvd6e19kkvjjpd96mmwf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRmYmQ3OGQtNWUxZGI5OTQtZTNiZDU5YWMtODliNmM0MGQ=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:17.967522Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1146:2711] TxId: 281474976710677. Ctx: { TraceId: 01k950gvd6e19kkvjjpd96mmwf, Database: , SessionId: ydb://session/3?node_id=2&id=ZDRmYmQ3OGQtNWUxZGI5OTQtZTNiZDU5YWMtODliNmM0MGQ=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-11-03T14:04:17.967818Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710678. Resolved key sets: 0 2025-11-03T14:04:17.967973Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710678. Ctx: { TraceId: 01k950gvd62y54fhjgtxzc82fe, Database: , SessionId: ydb://session/3?node_id=2&id=ZTFhNjg4MzQtNDVkYWY4Y2QtNWNmMDM3ZWUtY2ZjMTFlODI=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:04:17.967982Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710678. Ctx: { TraceId: 01k950gvd62y54fhjgtxzc82fe, Database: , SessionId: ydb://session/3?node_id=2&id=ZTFhNjg4MzQtNDVkYWY4Y2QtNWNmMDM3ZWUtY2ZjMTFlODI=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-11-03T14:04:17.967988Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2681: ActorId: [2:1153:2713] TxId: 281474976710678. Ctx: { TraceId: 01k950gvd62y54fhjgtxzc82fe, Database: , SessionId: ydb://session/3?node_id=2&id=ZTFhNjg4MzQtNDVkYWY4Y2QtNWNmMDM3ZWUtY2ZjMTFlODI=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:17.967998Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1153:2713] TxId: 281474976710678. Ctx: { TraceId: 01k950gvd62y54fhjgtxzc82fe, Database: , SessionId: ydb://session/3?node_id=2&id=ZTFhNjg4MzQtNDVkYWY4Y2QtNWNmMDM3ZWUtY2ZjMTFlODI=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:17.968005Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1153:2713] TxId: 281474976710678. Ctx: { TraceId: 01k950gvd62y54fhjgtxzc82fe, Database: , SessionId: ydb://session/3?node_id=2&id=ZTFhNjg4MzQtNDVkYWY4Y2QtNWNmMDM3ZWUtY2ZjMTFlODI=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-11-03T14:04:17.968011Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710679. Resolved key sets: 0 2025-11-03T14:04:17.968179Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710679. 
Ctx: { TraceId: 01k950gvd7czsn3sreh0thw8ff, Database: , SessionId: ydb://session/3?node_id=2&id=ZjUxNjIzZDAtOWFmODI5NmQtNTM4Y2U5NjctNDA3Y2NkOGE=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:04:17.968186Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710679. Ctx: { TraceId: 01k950gvd7czsn3sreh0thw8ff, Database: , SessionId: ydb://session/3?node_id=2&id=ZjUxNjIzZDAtOWFmODI5NmQtNTM4Y2U5NjctNDA3Y2NkOGE=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-11-03T14:04:17.968192Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2681: ActorId: [2:1154:2717] TxId: 281474976710679. Ctx: { TraceId: 01k950gvd7czsn3sreh0thw8ff, Database: , SessionId: ydb://session/3?node_id=2&id=ZjUxNjIzZDAtOWFmODI5NmQtNTM4Y2U5NjctNDA3Y2NkOGE=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:17.968200Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1154:2717] TxId: 281474976710679. Ctx: { TraceId: 01k950gvd7czsn3sreh0thw8ff, Database: , SessionId: ydb://session/3?node_id=2&id=ZjUxNjIzZDAtOWFmODI5NmQtNTM4Y2U5NjctNDA3Y2NkOGE=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:17.968206Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1154:2717] TxId: 281474976710679. Ctx: { TraceId: 01k950gvd7czsn3sreh0thw8ff, Database: , SessionId: ydb://session/3?node_id=2&id=ZjUxNjIzZDAtOWFmODI5NmQtNTM4Y2U5NjctNDA3Y2NkOGE=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-11-03T14:04:17.968212Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710680. Resolved key sets: 0 2025-11-03T14:04:17.968241Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710680. Ctx: { TraceId: 01k950gvd71p1cyt32ve90vkec, Database: , SessionId: ydb://session/3?node_id=2&id=ZGZhYmNmOGUtOWFkOWFjODYtZWE5Yjk5OGEtY2JkYjQxMw==, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:04:17.968245Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710680. Ctx: { TraceId: 01k950gvd71p1cyt32ve90vkec, Database: , SessionId: ydb://session/3?node_id=2&id=ZGZhYmNmOGUtOWFkOWFjODYtZWE5Yjk5OGEtY2JkYjQxMw==, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-11-03T14:04:17.968251Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2681: ActorId: [2:1163:2720] TxId: 281474976710680. Ctx: { TraceId: 01k950gvd71p1cyt32ve90vkec, Database: , SessionId: ydb://session/3?node_id=2&id=ZGZhYmNmOGUtOWFkOWFjODYtZWE5Yjk5OGEtY2JkYjQxMw==, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:17.968257Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1163:2720] TxId: 281474976710680. Ctx: { TraceId: 01k950gvd71p1cyt32ve90vkec, Database: , SessionId: ydb://session/3?node_id=2&id=ZGZhYmNmOGUtOWFkOWFjODYtZWE5Yjk5OGEtY2JkYjQxMw==, PoolId: default, DatabaseId: /Root}. terminate execution. 
2025-11-03T14:04:17.968263Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1163:2720] TxId: 281474976710680. Ctx: { TraceId: 01k950gvd71p1cyt32ve90vkec, Database: , SessionId: ydb://session/3?node_id=2&id=ZGZhYmNmOGUtOWFkOWFjODYtZWE5Yjk5OGEtY2JkYjQxMw==, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-11-03T14:04:17.968317Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710681. Resolved key sets: 0 2025-11-03T14:04:17.968432Z node 2 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976710681. Ctx: { TraceId: 01k950gvd7dat8edrab5xprbg4, Database: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjA2MzYtNzM5ZjgyZWEtZjcxOGU0ZjQtYmUyMmQ3ZGM=, PoolId: default, DatabaseId: /Root}. Database not set, use /Root 2025-11-03T14:04:17.968437Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:558: TxId: 281474976710681. Ctx: { TraceId: 01k950gvd7dat8edrab5xprbg4, Database: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjA2MzYtNzM5ZjgyZWEtZjcxOGU0ZjQtYmUyMmQ3ZGM=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2025-11-03T14:04:17.968443Z node 2 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2681: ActorId: [2:1164:2724] TxId: 281474976710681. Ctx: { TraceId: 01k950gvd7dat8edrab5xprbg4, Database: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjA2MzYtNzM5ZjgyZWEtZjcxOGU0ZjQtYmUyMmQ3ZGM=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:17.968451Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1164:2724] TxId: 281474976710681. Ctx: { TraceId: 01k950gvd7dat8edrab5xprbg4, Database: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjA2MzYtNzM5ZjgyZWEtZjcxOGU0ZjQtYmUyMmQ3ZGM=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:17.968457Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1164:2724] TxId: 281474976710681. Ctx: { TraceId: 01k950gvd7dat8edrab5xprbg4, Database: , SessionId: ydb://session/3?node_id=2&id=ZmIyNjA2MzYtNzM5ZjgyZWEtZjcxOGU0ZjQtYmUyMmQ3ZGM=, PoolId: default, DatabaseId: /Root}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1
|56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest
>> Viewer::Cluster10000Tablets [GOOD]
>> Viewer::FuzzySearcherLimit1OutOf4 [GOOD]
>> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation
>> test.py::test[aggregate-group_by_full_path-default.txt-Results] [GOOD]
>> test.py::test[aggregate-group_by_hop_only_start--Results] [SKIPPED]
>> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results]
>> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD]
>> test.py::test[aggregate-group_by_session_distinct_compact--Results] [GOOD]
>> test.py::test[aggregate-library_error_in_aggregation_fail--Results]
>> KqpScanArrowInChanels::AggregateCountStar [GOOD]
>> KqpScanArrowInChanels::AllTypesColumns
>> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD]
>> TSchemeShardTest::BackupBackupCollection-WithIncremental-true
>> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD]
|56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest
>> KqpScanArrowFormat::AggregateCountStar
>> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD]
>> Viewer::JsonAutocompleteSimilarDatabaseNamePOST
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD]
Test command err:
2025-11-03T14:03:36.722810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown
2025-11-03T14:03:36.755719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes
2025-11-03T14:03:36.757839Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:529:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:36.757916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:36.757949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0020fb/r3tmp/tmpTFsrEM/pdisk_1.dat 2025-11-03T14:03:36.874798Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:36.918770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:36.918800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:36.940298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63460, node 1 2025-11-03T14:03:37.003817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:03:37.003836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:03:37.003840Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:03:37.003913Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:03:37.006315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:37.031324Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:21671 2025-11-03T14:03:37.372195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:38.146449Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:38.148348Z node 3 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 3 2025-11-03T14:03:38.149248Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:38.157223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:38.157257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:38.190948Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-11-03T14:03:38.191759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:38.263875Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.263985Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264103Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264260Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264303Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264327Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264352Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264388Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.264409Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:03:38.365936Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:38.408268Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared1/.metadata/script_executions 2025-11-03T14:03:38.511633Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:38.511667Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:38.522827Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:38.561452Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:38.575068Z node 3 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-11-03T14:03:38.575105Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-11-03T14:03:38.582776Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-11-03T14:03:38.583211Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-11-03T14:03:38.583240Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-11-03T14:03:38.583246Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded 
ColumnStatistics: column count# 0 2025-11-03T14:03:38.583252Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-11-03T14:03:38.583258Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-11-03T14:03:38.583264Z node 3 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-11-03T14:03:38.583271Z node 3 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-11-03T14:03:38.583635Z node 3 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-11-03T14:03:38.725440Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8310: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:38.725472Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8340: ConnectToSA(), pipe client id: [3:2073:2641], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:03:38.726617Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [3:2084:2646] 2025-11-03T14:03:38.726693Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2084:2646], schemeshard id = 72075186224037897 2025-11-03T14:03:38.739138Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [3:2101:2649] 2025-11-03T14:03:38.739444Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-11-03T14:03:38.741153Z node 3 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-11-03T14:03:38.741170Z node 3 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-11-03T14:03:38.741182Z node 3 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-11-03T14:03:38.742655Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [3:2129:2661], domain# [OwnerId: 72057594046644480, LocalPathId: 2], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-11-03T14:03:38.743692Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:38.745690Z node 3 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-11-03T14:03:38.745722Z node 3 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-11-03T14:03:38.783753Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2025-11-03T14:03:38.879535Z node 3 :STATISTICS DEBUG: table_creator.cpp:303: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 
2025-11-03T14:03:38.974067Z node 3 :STATISTICS DEBUG: table_creator.cpp:375: Table _statistics updater. Column diff is empty, finishing 2025-11-03T14:03:39.405882Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:39.509765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:03:40.316487Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFi ... 2 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224038895] EvFastPropagateCheck 2025-11-03T14:04:12.323225Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-11-03T14:04:12.394987Z node 2 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [2:6483:3413], schemeshard count = 1 2025-11-03T14:04:12.968580Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2025-11-03T14:04:12.968609Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.746000s, at schemeshard: 72075186224037899 2025-11-03T14:04:12.968697Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2025-11-03T14:04:12.980558Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-11-03T14:04:13.205697Z node 3 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6550:3533]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:13.205804Z node 3 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2025-11-03T14:04:13.205811Z node 3 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 29, ReplyToActorId = [3:6550:3533], StatRequests.size() = 1 2025-11-03T14:04:13.270601Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-11-03T14:04:13.270697Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:13.270704Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-11-03T14:04:13.270714Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-11-03T14:04:13.270720Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-11-03T14:04:13.270823Z node 3 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [3:6555:3535], ActorId: [3:6556:3536], Starting query actor #1 [3:6557:3537] 2025-11-03T14:04:13.270836Z node 3 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [3:6556:3536], ActorId: [3:6557:3537], Bootstrap. 
Database: /Root/Shared1, IsSystemUser: 1, run create session 2025-11-03T14:04:13.271678Z node 3 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [3:6556:3536], ActorId: [3:6557:3537], RunDataQuery with SessionId: ydb://session/3?node_id=3&id=MjQ2NGQ5ZWItODZiNmM3NWEtNGYyN2EzYWQtYWZjMzczNWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:13.275905Z node 3 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [3:6556:3536], ActorId: [3:6557:3537], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=MjQ2NGQ5ZWItODZiNmM3NWEtNGYyN2EzYWQtYWZjMzczNWQ=, TxId: 2025-11-03T14:04:13.275931Z node 3 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [3:6556:3536], ActorId: [3:6557:3537], Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=MjQ2NGQ5ZWItODZiNmM3NWEtNGYyN2EzYWQtYWZjMzczNWQ=, TxId: 2025-11-03T14:04:13.276022Z node 3 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [3:6555:3535], ActorId: [3:6556:3536], Got response [3:6557:3537] SUCCESS 2025-11-03T14:04:13.276111Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:13.293781Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-11-03T14:04:13.293807Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-11-03T14:04:14.847207Z node 3 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6632:3566]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:14.847336Z node 3 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2025-11-03T14:04:14.847346Z node 3 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 30, ReplyToActorId = [3:6632:3566], StatRequests.size() = 1 2025-11-03T14:04:15.640597Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038900 2025-11-03T14:04:15.640630Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 4.634000s, at schemeshard: 72075186224038900 2025-11-03T14:04:15.640716Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038900, stats byte size: 26, entries count: 1, are all stats full: 1 2025-11-03T14:04:15.660117Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2025-11-03T14:04:15.759147Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224038895] ScheduleNextTraversal 2025-11-03T14:04:15.759177Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-11-03T14:04:15.759186Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038900, LocalPathId: 2] is data table. 2025-11-03T14:04:15.759193Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224038895] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224038900, LocalPathId: 2] 2025-11-03T14:04:15.759306Z node 2 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [2:6652:3458], ActorId: [2:6653:3459], Starting query actor #1 [2:6654:3460] 2025-11-03T14:04:15.759319Z node 2 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [2:6653:3459], ActorId: [2:6654:3460], Bootstrap. Database: /Root/Shared2, IsSystemUser: 1, run create session 2025-11-03T14:04:15.760074Z node 2 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [2:6653:3459], ActorId: [2:6654:3460], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OWU4Y2JlNzAtY2UxNjM3N2ItNTcwZjViYjItZDg1NWRmNjg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:15.763898Z node 2 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [2:6653:3459], ActorId: [2:6654:3460], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWU4Y2JlNzAtY2UxNjM3N2ItNTcwZjViYjItZDg1NWRmNjg=, TxId: 2025-11-03T14:04:15.763924Z node 2 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [2:6653:3459], ActorId: [2:6654:3460], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWU4Y2JlNzAtY2UxNjM3N2ItNTcwZjViYjItZDg1NWRmNjg=, TxId: 2025-11-03T14:04:15.764024Z node 2 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [2:6652:3458], ActorId: [2:6653:3459], Got response [2:6654:3460] SUCCESS 2025-11-03T14:04:15.764094Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224038895] TTxFinishTraversal::Execute 2025-11-03T14:04:15.783784Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038900, LocalPathId: 2] 2025-11-03T14:04:15.783822Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-11-03T14:04:16.660288Z node 3 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6718:3585]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:16.660434Z node 3 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-11-03T14:04:16.660446Z node 3 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 31, ReplyToActorId = [3:6718:3585], StatRequests.size() = 1 2025-11-03T14:04:16.715247Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-11-03T14:04:16.715523Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 6 2025-11-03T14:04:16.715663Z node 3 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 3 cookie: 0 2025-11-03T14:04:16.715686Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2025-11-03T14:04:16.726497Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:16.726524Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-11-03T14:04:16.814826Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-11-03T14:04:16.814856Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-11-03T14:04:16.814980Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2025-11-03T14:04:16.827528Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-11-03T14:04:18.315161Z node 3 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6766:3598]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:18.315273Z node 3 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-11-03T14:04:18.315281Z node 3 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 32, ReplyToActorId = [3:6766:3598], StatRequests.size() = 1 2025-11-03T14:04:18.315396Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6768:3497]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:18.316290Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-11-03T14:04:18.316370Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:177: [72075186224038895] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-11-03T14:04:18.316376Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-11-03T14:04:18.316418Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2025-11-03T14:04:18.316428Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [2:6768:3497], StatRequests.size() = 1 2025-11-03T14:04:18.316437Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> KqpResultSetFormats::ArrowFormat_Stress >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> Viewer::JsonAutocompleteScheme [GOOD] >> Viewer::JsonAutocompleteEmptyColumns ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-11-03T14:04:16.811794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:16.853295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:16.855976Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:16.856062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:16.856102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e11/r3tmp/tmpvPO2jN/pdisk_1.dat 2025-11-03T14:04:16.933354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:16.933395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:16.946570Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:16.947661Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178656355560 != 1762178656355564 2025-11-03T14:04:16.978934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:17.029490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:17.066041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:17.151779Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:04:17.151809Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:04:17.151830Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:04:17.172263Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:04:17.172315Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:04:17.172557Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:04:17.172574Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:04:17.172644Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:04:17.172697Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:04:17.172712Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:04:17.172784Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:04:17.173235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:17.173537Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:04:17.173552Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:04:17.191962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:17.192261Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:17.192337Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:681:2569] 2025-11-03T14:04:17.192402Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:04:17.193581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:667:2561], Recipient [1:685:2572]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:17.205862Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:681:2569]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:17.205974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:667:2561], Recipient [1:685:2572]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:17.206094Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:685:2572] 2025-11-03T14:04:17.206137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:04:17.207507Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:667:2561], Recipient [1:685:2572]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:17.207937Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: 
TxInitSchema.Complete 2025-11-03T14:04:17.207967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:04:17.208150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:04:17.208161Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:04:17.208170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:04:17.208231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:04:17.208267Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:04:17.208281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:712:2569] in generation 1 2025-11-03T14:04:17.208359Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:04:17.208376Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:04:17.208483Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-11-03T14:04:17.208491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2025-11-03T14:04:17.208497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2025-11-03T14:04:17.208527Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:04:17.208541Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:04:17.208551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:713:2572] in generation 1 2025-11-03T14:04:17.218967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:04:17.232081Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:04:17.232179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:04:17.232215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:716:2590] 2025-11-03T14:04:17.232222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:04:17.232228Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:04:17.232234Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:17.232273Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:04:17.232282Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2025-11-03T14:04:17.232293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:04:17.232302Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:717:2591] 
2025-11-03T14:04:17.232307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2025-11-03T14:04:17.232312Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-11-03T14:04:17.232316Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-11-03T14:04:17.232377Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:681:2569], Recipient [1:681:2569]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:17.232385Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvP ... ecutedNoMoreRestarts 2025-11-03T14:04:19.413224Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2025-11-03T14:04:19.413226Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2025-11-03T14:04:19.413229Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-11-03T14:04:19.413246Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-11-03T14:04:19.413249Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2025-11-03T14:04:19.413251Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:04:19.413254Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:04:19.413262Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:2] at 72075186224037888 is Executed 2025-11-03T14:04:19.413265Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:04:19.413268Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:2] at 72075186224037888 has finished 2025-11-03T14:04:19.423606Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2025-11-03T14:04:19.423634Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2025-11-03T14:04:19.423644Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2025-11-03T14:04:19.423673Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:19.424763Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:67:2114] Handle TEvProposeTransaction 2025-11-03T14:04:19.424782Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:67:2114] TxId# 281474976710661 ProcessProposeTransaction 2025-11-03T14:04:19.424797Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [2:67:2114] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [2:863:2681] DataReq marker# P0 2025-11-03T14:04:19.424826Z node 2 
:TX_PROXY DEBUG: datareq.cpp:1330: Actor# [2:863:2681] Cookie# 0 txid# 281474976710661 HANDLE TDataReq marker# P1 2025-11-03T14:04:19.424900Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:863:2681] txid# 281474976710661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-11-03T14:04:19.424944Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:863:2681] txid# 281474976710661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-11-03T14:04:19.424970Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:863:2681] txid# 281474976710661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-11-03T14:04:19.425063Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [2:863:2681], Recipient [2:675:2566]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 863 RawX2: 8589937273 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t_\003\000\000\000\000\000\000\021y\n\000\000\002\000\000\000" TxId: 281474976710661 ExecLevel: 0 Flags: 8 2025-11-03T14:04:19.425074Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:04:19.425120Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:04:19.425173Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-11-03T14:04:19.425193Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit CheckDataTx 2025-11-03T14:04:19.425206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-11-03T14:04:19.425212Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit CheckDataTx 2025-11-03T14:04:19.425217Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:04:19.425222Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:04:19.425233Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-11-03T14:04:19.425246Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976710661] at 72075186224037888 2025-11-03T14:04:19.425253Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-11-03T14:04:19.425257Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:04:19.425267Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit 
MakeScanSnapshot 2025-11-03T14:04:19.425271Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit MakeScanSnapshot 2025-11-03T14:04:19.425277Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-11-03T14:04:19.425282Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-11-03T14:04:19.425286Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-11-03T14:04:19.425291Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit WaitForStreamClearance 2025-11-03T14:04:19.425301Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:863:2681] for [0:281474976710661] at 72075186224037888 2025-11-03T14:04:19.425307Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Continue 2025-11-03T14:04:19.425324Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:04:19.425346Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976710661 2025-11-03T14:04:19.425358Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976710661 2025-11-03T14:04:19.425365Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976710661, cleared: 1 2025-11-03T14:04:19.425391Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287942, Sender [2:863:2681], Recipient [2:675:2566]: NKikimrTx.TEvStreamClearancePending TxId: 281474976710661 2025-11-03T14:04:19.425398Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-11-03T14:04:19.425416Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269287940, Sender [2:863:2681], Recipient [2:675:2566]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976710661 Cleared: true 2025-11-03T14:04:19.425421Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-11-03T14:04:19.425435Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [2:675:2566], Recipient [2:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:19.425440Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:19.425449Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:04:19.425457Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:04:19.425464Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976710661] at 72075186224037888 for WaitForStreamClearance 2025-11-03T14:04:19.425470Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 
72075186224037888 on unit WaitForStreamClearance 2025-11-03T14:04:19.425475Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976710661] at 72075186224037888 2025-11-03T14:04:19.425481Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Executed 2025-11-03T14:04:19.425487Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976710661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-11-03T14:04:19.425493Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976710661] at 72075186224037888 to execution unit ReadTableScan 2025-11-03T14:04:19.425498Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976710661] at 72075186224037888 on unit ReadTableScan 2025-11-03T14:04:19.425550Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976710661] at 72075186224037888 is Continue 2025-11-03T14:04:19.425555Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-11-03T14:04:19.425560Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2025-11-03T14:04:19.425565Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:04:19.425569Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:04:19.425580Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:04:19.425719Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435082, Sender [2:869:2686], Recipient [2:675:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-11-03T14:04:19.425726Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] Test command err: 2025-11-03T14:04:16.435423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:16.496036Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:16.499307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:16.499402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:16.499445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001e18/r3tmp/tmpYKVCk3/pdisk_1.dat 2025-11-03T14:04:16.576745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:16.576794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:16.590381Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:16.591409Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178655823840 != 1762178655823844 2025-11-03T14:04:16.626606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:16.676045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:16.709806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:16.793976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:16.810616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:16.810899Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:16.810955Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:04:16.811020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:04:16.821389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:16.821604Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:04:16.821633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: 
TDataShard::TTxInit::Execute 2025-11-03T14:04:16.821811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:04:16.821822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:04:16.821829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:04:16.821894Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:04:16.821922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:04:16.821937Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:04:16.832289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:04:16.837507Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:04:16.837601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:04:16.837627Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:04:16.837633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:04:16.837637Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:04:16.837642Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:16.837717Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:16.837723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:04:16.837810Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:04:16.837831Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:04:16.837909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:04:16.837935Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:04:16.837942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:04:16.837948Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:04:16.837952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:04:16.837958Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:04:16.837964Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 
2025-11-03T14:04:16.837979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:16.837985Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:16.837992Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:04:16.838004Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:04:16.838089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:04:16.838113Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:04:16.838171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:04:16.838182Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:04:16.838202Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:04:16.838210Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-11-03T14:04:16.838215Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-11-03T14:04:16.838221Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-11-03T14:04:16.838226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-11-03T14:04:16.838276Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-11-03T14:04:16.838281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-11-03T14:04:16.838286Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-11-03T14:04:16.838290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-11-03T14:04:16.838301Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-11-03T14:04:16.838305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-11-03T14:04:16.838319Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-11-03T14:04:16.838324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on 
unit WaitForPlan 2025-11-03T14:04:16.838329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-11-03T14:04:16.838620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269746185, Sender [1:693:2577], Recipient [1:674:2566]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-11-03T14:04:16.838646Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-11-03T14:04:16.848969Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:04:16.848999Z node 1 :TX_DATASHAR ... WQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 6 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1042:2827], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], CA [2:1041:2826], 2025-11-03T14:04:19.545397Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1041:2826], task: 3, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 60 DurationUs: 1000 Tasks { TaskId: 3 StageId: 2 CpuTimeUs: 16 FinishTimeMs: 1762178659545 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ComputeCpuTimeUs: 8 BuildCpuTimeUs: 8 HostName: "ghrun-shzehrlvom" NodeId: 2 StartTimeMs: 1762178659544 CreateTimeMs: 1762178659543 UpdateTimeMs: 1762178659545 } MaxMemoryUsage: 1048576 } 2025-11-03T14:04:19.545407Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1041:2826] 2025-11-03T14:04:19.545416Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1042:2827], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-11-03T14:04:19.545422Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1042:2827], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-11-03T14:04:19.545505Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. 
ActorState: ExecuteState, got execution state from compute actor: [2:1042:2827], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 218 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 172 FinishTimeMs: 1762178659545 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 150 BuildCpuTimeUs: 22 HostName: "ghrun-shzehrlvom" NodeId: 2 StartTimeMs: 1762178659544 CreateTimeMs: 1762178659543 UpdateTimeMs: 1762178659545 } MaxMemoryUsage: 1048576 } 2025-11-03T14:04:19.545513Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1042:2827] 2025-11-03T14:04:19.545521Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-11-03T14:04:19.545527Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1046:2831], CA [2:1043:2828], CA [2:1044:2829], 2025-11-03T14:04:19.545572Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1043:2828], task: 4, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 270 DurationUs: 1000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 240 FinishTimeMs: 1762178659545 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 231 BuildCpuTimeUs: 9 HostName: "ghrun-shzehrlvom" NodeId: 2 StartTimeMs: 1762178659544 CreateTimeMs: 1762178659543 UpdateTimeMs: 1762178659545 } MaxMemoryUsage: 1048576 } 2025-11-03T14:04:19.545580Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1043:2828] 2025-11-03T14:04:19.545586Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1046:2831], CA [2:1044:2829], 2025-11-03T14:04:19.545592Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. 
ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1046:2831], CA [2:1044:2829], 2025-11-03T14:04:19.545613Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1044:2829], task: 5, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 129 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 91 FinishTimeMs: 1762178659545 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 71 BuildCpuTimeUs: 20 HostName: "ghrun-shzehrlvom" NodeId: 2 StartTimeMs: 1762178659545 CreateTimeMs: 1762178659543 UpdateTimeMs: 1762178659545 } MaxMemoryUsage: 1048576 } 2025-11-03T14:04:19.545620Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1044:2829] 2025-11-03T14:04:19.545626Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Waiting for: CA [2:1045:2830], CA [2:1046:2831], 2025-11-03T14:04:19.545632Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1045:2830], CA [2:1046:2831], 2025-11-03T14:04:19.545670Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1045:2830], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 68 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 33 FinishTimeMs: 1762178659545 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 19 BuildCpuTimeUs: 14 HostName: "ghrun-shzehrlvom" NodeId: 2 CreateTimeMs: 1762178659543 UpdateTimeMs: 1762178659545 } MaxMemoryUsage: 1048576 } 2025-11-03T14:04:19.545677Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1045:2830] 2025-11-03T14:04:19.545683Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:699: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. 
Waiting for: CA [2:1046:2831], 2025-11-03T14:04:19.545688Z node 2 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1046:2831], 2025-11-03T14:04:19.545733Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:467: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. ActorState: ExecuteState, got execution state from compute actor: [2:1046:2831], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 67 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 29 FinishTimeMs: 1762178659545 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 20 BuildCpuTimeUs: 9 HostName: "ghrun-shzehrlvom" NodeId: 2 StartTimeMs: 1762178659545 CreateTimeMs: 1762178659543 UpdateTimeMs: 1762178659545 } MaxMemoryUsage: 1048576 } 2025-11-03T14:04:19.545741Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:721: TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. Compute actor has finished execution: [2:1046:2831] 2025-11-03T14:04:19.545792Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:19.545801Z node 2 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [2:1034:2808] TxId: 281474976710665. Ctx: { TraceId: 01k950gx4h98db8gdpx3t1yk4v, Database: , SessionId: ydb://session/3?node_id=2&id=ZTQ0YTg5YzctYmNmMmEzNmEtMTZiMDI0NWQtYmI2ZmM2YmI=, PoolId: default, DatabaseId: /Root}. 
Resource usage for last stat interval: ComputeTime: 0.001060s ReadRows: 2 ReadBytes: 16 ru: 2 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::QueryExecuteScript >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> test.py::test[aggregate-library_error_in_aggregation_fail--Results] [GOOD] >> test.py::test[bigdate-table_io-default.txt-Results] >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> test.py::test[window-win_by_all_avg_interval-default.txt-Results] [GOOD] >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 |56.6%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> CommitOffset::Commit_WithSession_ToPastParentPartition [GOOD] >> CommitOffset::DistributedTxCommit >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> TopicTimestamp::TimestampRead_1MB_Topic_offset+middle_WithRestart [GOOD] >> TopicTimestamp::TimestampRead_40MB_Topic_exact_WithRestart ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:10.720368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:10.720400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.720409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:10.720418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:10.720427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:10.720433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:10.720446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.720468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:10.720606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:10.720675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:10.737236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:10.737265Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:10.740703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:10.740782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:10.740820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:10.742509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:10.742607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:10.742739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.742953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:10.743460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.743496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:10.743721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.743731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.743759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:10.743766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-11-03T14:04:10.743772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:10.743794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.745203Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:10.772124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:10.772217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.772279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:10.772287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:10.772338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:10.772353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:10.773270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:10.773374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:10.773391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:10.773397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:10.773898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773913Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:10.773923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:10.774308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.774320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.774326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.774342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:10.775190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:10.775628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:10.775669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:10.775893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.775923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:10.775931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.775996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:10.776003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.776045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:10.776058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing 
txId 1 2025-11-03T14:04:10.776550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.776560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... eStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 35 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 24 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:20.409543Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:20.409561Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" took 19us result status StatusSuccess 2025-11-03T14:04:20.409611Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 35 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 24 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 35 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:20.409669Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:20.409684Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" took 15us result status StatusSuccess 2025-11-03T14:04:20.409733Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 24 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:20.409798Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:20.409815Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" took 18us result status StatusSuccess 2025-11-03T14:04:20.409860Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 24 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> KqpResultSetFormats::ArrowFormat_Stress [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_1 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-11-03T14:04:06.947529Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499654611636284:2084];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:06.947722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:06.950699Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:06.951709Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499656623152826:2163];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025c2/r3tmp/tmpkGKQzS/pdisk_1.dat 2025-11-03T14:04:06.954102Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:06.954195Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:06.994785Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:06.995510Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:07.008727Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11494, node 1 2025-11-03T14:04:07.025446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025c2/r3tmp/yandexrSbnuz.tmp 2025-11-03T14:04:07.025462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025c2/r3tmp/yandexrSbnuz.tmp 2025-11-03T14:04:07.029797Z INFO: TTestServer started on Port 2991 GrpcPort 11494 TClient is connected to server localhost:2991 2025-11-03T14:04:07.049223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.049250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting PQClient connected to localhost:11494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:04:07.051532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:07.054568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:07.054591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:07.055915Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:07.056286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:07.067021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025c2/r3tmp/yandexrSbnuz.tmp 2025-11-03T14:04:07.067182Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:07.068007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:07.077390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-11-03T14:04:07.099533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-11-03T14:04:07.101570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... waiting... 2025-11-03T14:04:07.211983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:07.234782Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:07.395934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499658906604547:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.395941Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499658906604573:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.395977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.396753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499658906604576:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.396801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.396898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:07.398814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499658906604606:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.398850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.398956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499658906604612:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.398967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.406287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499658906604575:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-11-03T14:04:07.442349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.458496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.484594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-11-03T14:04:07.501901Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499658906605037:3015] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeR ... 
READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: user1_3_2_10819848107351977967_v1 2025-11-03T14:04:20.865077Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037898][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.865092Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037898][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.865096Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037898][Partition][1][StateIdle] Try persist 2025-11-03T14:04:20.865117Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037898][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.865120Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037898][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.865123Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037898][Partition][2][StateIdle] Try persist 2025-11-03T14:04:20.865180Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037899][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.865191Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037899][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.865193Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037899][Partition][3][StateIdle] Try persist 2025-11-03T14:04:20.865201Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037899][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.865203Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037899][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.865204Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037899][Partition][4][StateIdle] Try persist 2025-11-03T14:04:20.865214Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037900][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.865217Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037900][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.865218Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037900][Partition][0][StateIdle] Try persist 2025-11-03T14:04:20.918302Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037902][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.918324Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037902][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.918327Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037902][Partition][1][StateIdle] Try persist 2025-11-03T14:04:20.918350Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037902][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.918352Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037902][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.918354Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037902][Partition][2][StateIdle] Try persist 2025-11-03T14:04:20.918435Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: 
[72075186224037903][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.918451Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037903][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.918453Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037903][Partition][3][StateIdle] Try persist 2025-11-03T14:04:20.918467Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037903][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.918469Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037903][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.918471Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037903][Partition][4][StateIdle] Try persist 2025-11-03T14:04:20.918481Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037904][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.918483Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037904][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.918485Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037904][Partition][0][StateIdle] Try persist 2025-11-03T14:04:20.921479Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037894][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.921490Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037894][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.921493Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037894][Partition][1][StateIdle] Try persist 2025-11-03T14:04:20.921508Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037894][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.921510Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037894][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.921512Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037894][Partition][2][StateIdle] Try persist 2025-11-03T14:04:20.922310Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.922323Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.922326Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:20.922342Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.922345Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.922347Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:20.922354Z node 4 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.922355Z node 4 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching 
state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.922358Z node 4 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist 2025-11-03T14:04:20.967530Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037898][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.967545Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037898][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.967548Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037898][Partition][1][StateIdle] Try persist 2025-11-03T14:04:20.967566Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037898][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:20.967568Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037898][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:20.967570Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037898][Partition][2][StateIdle] Try persist 2025-11-03T14:04:21.018684Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037902][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.018710Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037902][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.018713Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037902][Partition][1][StateIdle] Try persist 2025-11-03T14:04:21.018734Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037902][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.018736Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037902][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.018737Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037902][Partition][2][StateIdle] Try persist 2025-11-03T14:04:21.026830Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037894][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.026843Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037894][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.026846Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037894][Partition][1][StateIdle] Try persist 2025-11-03T14:04:21.026867Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037894][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.026870Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037894][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.026871Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037894][Partition][2][StateIdle] Try persist 2025-11-03T14:04:21.067356Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037898][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.067370Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037898][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.067372Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037898][Partition][1][StateIdle] Try persist 2025-11-03T14:04:21.067390Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2289: [72075186224037898][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.067392Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037898][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.067394Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037898][Partition][2][StateIdle] Try persist 2025-11-03T14:04:21.122688Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037902][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.122722Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037902][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.122725Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037902][Partition][1][StateIdle] Try persist 2025-11-03T14:04:21.122748Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037902][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:21.122752Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037902][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:21.122754Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037902][Partition][2][StateIdle] Try persist >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Create >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_5 |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest |56.7%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> TReplicationWithRebootsTests::CreateInParallelWithoutInitialController >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> Viewer::CommitOffsetTest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2025-11-03T14:04:16.635264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:16.671842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:16.673287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:16.673324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:16.673355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:16.743313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:16.743353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:16.746943Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:16.747405Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178655815799 != 1762178655815803 2025-11-03T14:04:16.779335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8374, node 1 TClient is connected to server localhost:7155 2025-11-03T14:04:16.849142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:16.849164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:16.849170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:16.849266Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"},{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-11-03T14:04:17.989332Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:17.991648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:17.992220Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:327:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:17.992298Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:17.992310Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:18.057340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:18.057384Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:18.061467Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:18.061889Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1762178657306683 != 1762178657306687 2025-11-03T14:04:18.093066Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21905, node 2 TClient is connected to server localhost:19226 2025-11-03T14:04:18.163464Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:18.163486Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:18.163492Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:18.163728Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2025-11-03T14:04:19.283188Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:19.285050Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:19.286293Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:328:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:19.286379Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:19.286410Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:19.354347Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:19.354387Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:19.358206Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:19.359046Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:34:2081] 1762178658562424 != 1762178658562428 2025-11-03T14:04:19.395125Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1616, node 3 TClient is connected to server localhost:17453 2025-11-03T14:04:19.461891Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:19.461914Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:19.461919Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:19.462067Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} 2025-11-03T14:04:20.713651Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:20.716803Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:20.716889Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:20.716905Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:20.796069Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:20.796304Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.796325Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.796846Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:34:2081] 1762178659954475 != 1762178659954479 2025-11-03T14:04:20.832044Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23737, node 4 TClient is connected to server localhost:9935 2025-11-03T14:04:20.912542Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:20.912563Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:20.912568Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:20.912674Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"id","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"orders"}]},"Version":2} 2025-11-03T14:04:22.171450Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:22.179001Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:22.179647Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:327:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:22.179718Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:22.179743Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:22.254383Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:22.254422Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:22.258180Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:22.258618Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:34:2081] 1762178661436165 != 1762178661436169 2025-11-03T14:04:22.295145Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27125, node 5 TClient is connected to server localhost:6358 2025-11-03T14:04:22.364962Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:22.364982Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:22.364987Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:22.365127Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":6,"Entities":[{"Name":"name","Type":"column","Parent":"orders"},{"Name":"name","Type":"column","Parent":"products"},{"Name":"id","Type":"column","Parent":"orders"},{"Name":"id","Type":"column","Parent":"products"},{"Name":"description","Type":"column","Parent":"orders"},{"Name":"description","Type":"column","Parent":"products"}]},"Version":2} >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit >> Viewer::ServerlessNodesPage [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> KqpResultSetFormats::ArrowFormat_Types_Optional_5 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::SplitByLoadWithUpdates >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail >> YdbTableSplit::RenameTablesAndSplit ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-11-03T14:04:17.309630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:17.346848Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:17.348042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:17.348087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:17.348122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:17.417689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:17.417732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:17.421831Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:17.423311Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178656491344 != 1762178656491348 2025-11-03T14:04:17.457031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26172, node 1 TClient is connected to server localhost:29092 2025-11-03T14:04:17.527614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:17.527635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:17.527640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:17.527731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":5,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"},{"Name":"/Root/test","Type":"ext_sub_domain"},{"Name":"/Root/slice","Type":"ext_sub_domain"},{"Name":"/Root/qwerty","Type":"ext_sub_domain"}]},"Version":2} 2025-11-03T14:04:18.652233Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:18.654751Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:18.655378Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:327:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:18.655456Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:18.655467Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:18.725754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:18.725797Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:18.730181Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:18.731423Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1762178657942037 != 1762178657942041 2025-11-03T14:04:18.764381Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7279, node 2 TClient is connected to server localhost:4903 2025-11-03T14:04:18.840277Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:18.840300Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:18.840306Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:18.840542Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-11-03T14:04:19.974391Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:19.976096Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:19.977366Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:328:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:19.977446Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:19.977472Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:20.050125Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.050169Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.054123Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:20.054621Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:34:2081] 1762178659284975 != 1762178659284979 2025-11-03T14:04:20.091441Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21968, node 3 TClient is connected to server localhost:8874 2025-11-03T14:04:20.162099Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:20.162122Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:20.162128Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:20.162349Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-11-03T14:04:21.461511Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:21.464506Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:21.464600Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:21.464616Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:21.534853Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:21.535097Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:21.535127Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:21.535643Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:34:2081] 1762178660655548 != 1762178660655552 2025-11-03T14:04:21.571206Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6239, node 4 TClient is connected to server localhost:65262 2025-11-03T14:04:21.638523Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:21.638540Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:21.638544Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:21.638726Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":2,"Entities":[{"Name":"/Root/MyDatabase","Type":"ext_sub_domain"},{"Name":"/Root/TestDatabase","Type":"ext_sub_domain"}]},"Version":2} 2025-11-03T14:04:22.938453Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:22.940792Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:22.941415Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:327:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:22.941483Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:22.941509Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:23.007456Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:23.007500Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:23.016677Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:23.017208Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:34:2081] 1762178662215119 != 1762178662215123 2025-11-03T14:04:23.049366Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20858, node 5 TClient is connected to server localhost:19264 2025-11-03T14:04:23.114524Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:23.114547Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:23.114553Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:23.114784Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Success":true,"Result":{"Total":3,"Entities":[{"Name":"clients","Type":"table"},{"Name":"orders","Type":"table"},{"Name":"products","Type":"table"}]},"Version":2} >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> test.py::test[join-left_only_semi_and_other--Results] [GOOD] >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> Viewer::QueryExecuteScript [GOOD] >> Viewer::Plan2SvgOK >> CommitOffset::Commit_WithSession_ParentNotFinished_SameSession [GOOD] >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> 
TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true |56.7%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[join-left_only_with_other-off-Results] [SKIPPED] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> Viewer::Plan2SvgOK [GOOD] >> Viewer::Plan2SvgBad >> TopicTimestamp::TimestampRead_6MB_Topic_offset+middle_WithRestart [GOOD] |56.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |56.7%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> TopicTimestamp::TimestampRead_6MB_Topic_exact |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct >> TPersQueueMirrorerWithScaling::TestBasicRemote [GOOD] >> TPersQueueMirrorerWithScaling::TestQuotaLimitedGrow |56.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |56.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> Viewer::Plan2SvgBad [GOOD] >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> 
KqpScanArrowFormat::AggregateNoColumnNoRemaps >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-11-03T14:04:02.186267Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.186305Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.191440Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.191563Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.223451Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.444721Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.444753Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.448056Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.448222Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.469571Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.750409Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.750448Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.755474Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.755562Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.776919Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.777058Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2162], cookie=4538575365014729809, session=0, seqNo=0) 2025-11-03T14:04:02.777093Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:02.787850Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2162], cookie=4538575365014729809, session=1) 2025-11-03T14:04:02.787991Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:147:2169], cookie=18113967778467140907) 2025-11-03T14:04:02.788016Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:147:2169], cookie=18113967778467140907) 2025-11-03T14:04:03.214734Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.225720Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.560636Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.571348Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.916371Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2025-11-03T14:04:03.927308Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.272729Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.283471Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.649242Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.659915Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.005552Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.016289Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.351641Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.362566Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.727745Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.739062Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.092162Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.107181Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.504594Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.515706Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.876688Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.888251Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.254808Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.265889Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.641898Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.653128Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.026774Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.043132Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.451822Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.462773Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.820034Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.831017Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.194266Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.207019Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.564353Z node 3 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.575366Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.938848Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.949708Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.342582Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.353902Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.702406Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.713467Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.082815Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.095858Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.447899Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.459026Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.844069Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.859104Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.226687Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.237976Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.611665Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.625117Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.993229Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.004184Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:13.375932Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.390978Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:13.786052Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.803033Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.297314Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.311060Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.716881Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.737120Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:15.153880Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:15.175123Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2025-11-03T14:04:15.586792Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:15.603104Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.007036Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.027252Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.418910Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.431024Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.787369Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.800920Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:17.194304Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:17.207353Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:17.582492Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:17.595618Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:17.973460Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:17.987112Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:18.398284Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:18.409409Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:18.794929Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:18.811430Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:19.206823Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:19.223203Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:19.606211Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:19.617528Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:19.985050Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:19.998707Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:20.483214Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:20.497105Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:20.882853Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:20.895030Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:21.326793Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:21.346955Z node 3 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:21.724072Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:21.740190Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:22.134435Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:22.145889Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:22.559492Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:22.571725Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:22.943676Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:22.955205Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:23.333175Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:23.349956Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:23.718865Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:23.737399Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:24.118857Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:24.134285Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:24.524382Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:24.539384Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:24.946882Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:24.958976Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:25.334816Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:25.347612Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:25.754976Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:25.771040Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:26.189552Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:26.203037Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:26.670697Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-11-03T14:04:26.670745Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:26.681875Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-11-03T14:04:26.694956Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:537:2484], 
cookie=1669514628120844395) 2025-11-03T14:04:26.694997Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:537:2484], cookie=1669514628120844395) 2025-11-03T14:04:26.988350Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:26.988386Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:26.993354Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:26.993407Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:27.018490Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:27.020072Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2162], cookie=10761550642608833218, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-11-03T14:04:27.020143Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2025-11-03T14:04:27.050517Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2162], cookie=10761550642608833218) 2025-11-03T14:04:27.051106Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:27.051125Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=0) 2025-11-03T14:04:27.051194Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:149:2171]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-11-03T14:04:27.051201Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:149:2171], cookie=0) 2025-11-03T14:04:27.097149Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:27.097194Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:147:2169]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-11-03T14:04:27.097278Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:152:2174]) 2025-11-03T14:04:27.097319Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [4:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-11-03T14:04:27.158930Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:147:2169]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad |56.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |56.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: Data has built Merge = 0.06174053018 Data has merged 2025-11-03T14:04:16.203113Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499700713066332:2150];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:16.203175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:16.244305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:16.259435Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3424, node 1 2025-11-03T14:04:16.270123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:16.270134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:16.270136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:16.270180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:4134 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:16.299094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:16.302570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:16.304369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:16.304407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:16.307137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:16.308926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:16.309482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 
2025-11-03T14:04:16.316013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:16.316519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:16.423220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:16.720833Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499700713066932:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.720944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499700713066921:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.720959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.721950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499700713066936:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.721958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:16.721970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.725010Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499700713066935:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-11-03T14:04:16.791349Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499700713066988:2370] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:17.239719Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499705634039274:2072];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:17.240921Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:17.253459Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:17.254001Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7568499705634039240:2081] 1762178657239207 != 1762178657239210 2025-11-03T14:04:17.254066Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:17.255792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:17.255817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:17.256567Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25285, node 2 2025-11-03T14:04:17.266670Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:17.266682Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:17.266685Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:17.266748Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17892 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:17.286108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called a ... oot 1 2025-11-03T14:04:21.714735Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:21.787087Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:21.787117Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:21.866927Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:21.866956Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:21.867666Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:21.930987Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:21.931016Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:21.976793Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:21.976817Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.058965Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.058998Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.128062Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.128095Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.169749Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.169775Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.227553Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.227578Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.311056Z node 5 
:TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.311087Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.387052Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.387083Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.455055Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.455092Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.515344Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.515375Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.590901Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.590931Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.662890Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:22.662923Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:22.665063Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:22.665502Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:22.665750Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:23.806939Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:23.806972Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:23.969489Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:23.969521Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:24.035127Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:24.035151Z node 5 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:24.146914Z node 5 :RPC_REQUEST WARN: 
rpc_stream_execute_scan_query.cpp:410: Client lost 2025-11-03T14:04:24.146992Z node 5 :KQP_EXECUTER ERROR: kqp_executer_impl.h:1013: ActorId: [5:7568499732186849376:2606] TxId: 281474976710698. Ctx: { TraceId: 01k950h1n0c4n8k0vcmychq7q5, Database: /Root, SessionId: ydb://session/3?node_id=5&id=Mjg1Y2U2ZWQtYWI2YzA2Y2YtMTNiYjgxMC02ZTZiZDdlOQ==, PoolId: default}. ABORTED: {
: Error: Client lost } 2025-11-03T14:04:24.147066Z node 5 :KQP_SESSION WARN: kqp_session_actor.cpp:2987: SessionId: ydb://session/3?node_id=5&id=Mjg1Y2U2ZWQtYWI2YzA2Y2YtMTNiYjgxMC02ZTZiZDdlOQ==, ActorId: [5:7568499732186849356:2606], ActorState: ExecuteState, TraceId: 01k950h1n0c4n8k0vcmychq7q5, Create QueryResponse for error on request, msg: 2025-11-03T14:04:24.147227Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1762178664190, txId: 281474976710697] shutting down 2025-11-03T14:04:24.147304Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1210: SelfId: [5:7568499732186849381:2611], TxId: 281474976710698, task: 2. Ctx: { TraceId : 01k950h1n0c4n8k0vcmychq7q5. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Mjg1Y2U2ZWQtYWI2YzA2Y2YtMTNiYjgxMC02ZTZiZDdlOQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7568499732186849376:2606], status: ABORTED, reason: {
: Error: Terminate execution } 2025-11-03T14:04:24.789037Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:24.789235Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:24.810398Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:24.812365Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [6:7568499731932949156:2081] 1762178664765400 != 1762178664765403 2025-11-03T14:04:24.820090Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:24.820122Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23651, node 6 2025-11-03T14:04:24.827201Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:24.851157Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:24.851181Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:24.851186Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:24.851279Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31445 2025-11-03T14:04:25.070453Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:25.382379Z node 6 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (717F937C): Token is not supported test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:26.169203Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:26.169258Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:26.175660Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:26.175690Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:26.183177Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:26.184343Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:26.186946Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie 
mismatch for subscription [7:7568499740895199240:2081] 1762178666086158 != 1762178666086161 TServer::EnableGrpc on GrpcPort 62796, node 7 2025-11-03T14:04:26.286907Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:26.286920Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:26.286925Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:26.286980Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17670 2025-11-03T14:04:26.421979Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:26.830897Z node 7 :TICKET_PARSER ERROR: ticket_parser_impl.h:1008: Ticket **** (717F937C): Token is not supported >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: Trying to start YDB, gRPC: 19254, MsgBus: 23904 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003189/r3tmp/tmpcqw15B/pdisk_1.dat 2025-11-03T14:04:11.957411Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499676270216020:2153];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:11.957447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:12.000390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 19254, node 1 2025-11-03T14:04:12.010744Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:12.012377Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499676270215886:2081] 1762178651955602 != 1762178651955605 2025-11-03T14:04:12.020973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:12.020988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:12.020989Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:12.021033Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23904 2025-11-03T14:04:12.063471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:12.063498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:12.064493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23904 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-11-03T14:04:12.087768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:12.115233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.136512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.153866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:12.165308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.215160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:12.466670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499680565184827:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.466761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.466943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499680565184837:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.466956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.476626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.484798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.494288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.509340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.524011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.545295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.557635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.574819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.599971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499680565185700:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.600009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.600121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499680565185705:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.600123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499680565185706:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.600131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.600902Z node 1 :FLAT_TX_SCHEMESHARD ... PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:25.557437Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:25.558770Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:25.634218Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:25.906068Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499738199366305:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:25.906084Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499738199366334:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:25.906089Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:25.906178Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499738199366338:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:25.906197Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:25.906827Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:25.908950Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7568499738199366337:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-11-03T14:04:25.988202Z node 12 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [12:7568499738199366390:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=1; Trying to start YDB, gRPC: 32404, MsgBus: 27071 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003189/r3tmp/tmpjoOWhd/pdisk_1.dat 2025-11-03T14:04:26.574751Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:26.574798Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:26.581564Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:26.581595Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:26.583258Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:26.583454Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32404, node 13 2025-11-03T14:04:26.584381Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [13:7568499744220690599:2081] 1762178666534526 != 1762178666534529 2025-11-03T14:04:26.589637Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:26.589648Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:26.589650Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:26.589691Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27071 TClient is connected to server localhost:27071 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:26.683188Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:26.687468Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:26.734733Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:27.446841Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499748515658555:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:27.446893Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:27.450891Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499748515658582:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:27.450914Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499748515658583:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:27.450928Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:27.455386Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:27.458739Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-11-03T14:04:27.458857Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7568499748515658586:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-11-03T14:04:27.523733Z node 13 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [13:7568499748515658637:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:27.549838Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] Test command err: 2025-11-03T14:03:21.810061Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499464535170513:2074];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:21.810377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:03:21.817473Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/002b83/r3tmp/tmp3qitZa/pdisk_1.dat 2025-11-03T14:03:21.859707Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:03:21.864678Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:21.864879Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499464535170478:2081] 1762178601809627 != 1762178601809630 TServer::EnableGrpc on GrpcPort 25312, node 1 2025-11-03T14:03:21.872146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/002b83/r3tmp/yandexA6wCls.tmp 2025-11-03T14:03:21.872159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/002b83/r3tmp/yandexA6wCls.tmp 2025-11-03T14:03:21.872226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/002b83/r3tmp/yandexA6wCls.tmp 2025-11-03T14:03:21.872283Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:03:21.875140Z INFO: TTestServer started on Port 4450 GrpcPort 25312 TClient is connected to server localhost:4450 PQClient connected to localhost:25312 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:03:21.914054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:21.914079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:21.915136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:21.920518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-11-03T14:03:21.950318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... waiting... 2025-11-03T14:03:22.089268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:22.253817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499468830138610:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.253822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499468830138586:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.253847Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.253959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499468830138614:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.253988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.254364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499468830138643:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.254379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.254448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499468830138646:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.254460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:03:22.254676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:03:22.256579Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499468830138613:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-11-03T14:03:22.290443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:22.296638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:22.312470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-11-03T14:03:22.329643Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499468830138916:2590] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } === CheckClustersList. Subcribe to ClusterTracker from [1:7568499468830138975:2622] 2025-11-03T14:03:22.812439Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:03:26.810751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7568499464535170513:2074];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:03:26.810788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-11-03T14:03:27.588925Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-11-03T14:03:27.595249Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2025-11-03T14:03:27.607060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitCha ... Destroy direct read session test-consumer_10_2_4240258313044187396_v1 2025-11-03T14:04:27.423474Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037897] server disconnected, pipe [10:7568499747532173881:2712] destroyed 2025-11-03T14:04:27.423488Z node 10 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_10_2_4240258313044187396_v1 >>>>> Session-2 Release() >>>>> Session-2 Closing reading session 2025-11-03T14:04:27.435569Z :INFO: [/Root] [/Root] [414cce9-7cebde2d-2a9f6c7-fa2c9f23] Closing read session. Close timeout: 5.000000s 2025-11-03T14:04:27.435594Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:1:0:0 2025-11-03T14:04:27.435603Z :INFO: [/Root] [/Root] [414cce9-7cebde2d-2a9f6c7-fa2c9f23] Counters: { Errors: 0 CurrentSessionLifetimeMs: 51 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:04:27.435839Z :INFO: [/Root] [/Root] [414cce9-7cebde2d-2a9f6c7-fa2c9f23] Closing read session. Close timeout: 0.000000s 2025-11-03T14:04:27.435850Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:2:1:0:0 2025-11-03T14:04:27.435857Z :INFO: [/Root] [/Root] [414cce9-7cebde2d-2a9f6c7-fa2c9f23] Counters: { Errors: 0 CurrentSessionLifetimeMs: 51 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:04:27.435872Z :NOTICE: [/Root] [/Root] [414cce9-7cebde2d-2a9f6c7-fa2c9f23] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >>>>> Session-2 Received TSessionClosedEvent message SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-11-03T14:04:27.443081Z node 10 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer test-consumer session test-consumer_10_3_5777997826058146681_v1 grpc read done: success# 0, data# { } 2025-11-03T14:04:27.443095Z node 10 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer test-consumer session test-consumer_10_3_5777997826058146681_v1 grpc read failed 2025-11-03T14:04:27.443102Z node 10 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer test-consumer session test-consumer_10_3_5777997826058146681_v1 grpc closed 2025-11-03T14:04:27.443119Z node 10 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer test-consumer session test-consumer_10_3_5777997826058146681_v1 is DEAD 2025-11-03T14:04:27.444096Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1659: [72075186224037893][test-topic] pipe [10:7568499747532173893:2717] disconnected. 2025-11-03T14:04:27.444110Z node 10 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1670: [72075186224037893][test-topic] pipe [10:7568499747532173893:2717] disconnected; active server actors: 1 2025-11-03T14:04:27.444114Z node 10 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1679: [72075186224037893][test-topic] pipe [10:7568499747532173893:2717] client test-consumer disconnected session test-consumer_10_3_5777997826058146681_v1 2025-11-03T14:04:27.444158Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:2258: [PQ: 72075186224037896] Destroy direct read session test-consumer_10_3_5777997826058146681_v1 2025-11-03T14:04:27.444166Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037896] server disconnected, pipe [10:7568499747532173896:2720] destroyed 2025-11-03T14:04:27.444181Z node 10 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_10_3_5777997826058146681_v1 2025-11-03T14:04:27.451181Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|1f705ad2-6b128840-f423571f-862a42f1_0] PartitionId [0] Generation [1] Write session: destroy 2025-11-03T14:04:27.470776Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:157: [72075186224037893][test-topic] TPersQueueReadBalancer::HandleWakeup 2025-11-03T14:04:27.470801Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186224037893][test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 5 2025-11-03T14:04:27.470808Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186224037893][test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037897 Cookie: 6 2025-11-03T14:04:27.470813Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:453: [72075186224037893][test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037896 Cookie: 7 2025-11-03T14:04:27.471103Z node 10 :PERSQUEUE DEBUG: partition.cpp:1017: [72075186224037892][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-11-03T14:04:27.471181Z node 10 :PERSQUEUE DEBUG: partition.cpp:1017: [72075186224037897][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-11-03T14:04:27.471228Z node 10 :PERSQUEUE DEBUG: partition.cpp:1017: [72075186224037896][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-11-03T14:04:27.471270Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: test-topic] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-11-03T14:04:27.471280Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: test-topic] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-11-03T14:04:27.471285Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: partition_scale_manager.cpp:39: [TPartitionScaleManager: test-topic] Handle HandleScaleStatusChange. Scale status: NORMAL 2025-11-03T14:04:27.471390Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:563: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 3 DataSize: 0 UsedReserveSize: 0 2025-11-03T14:04:27.471423Z node 10 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1849: [72075186224037893][test-topic] ProcessPendingStats. 
PendingUpdates size 3 2025-11-03T14:04:27.471502Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-11-03T14:04:27.478715Z node 10 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037897][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:27.478732Z node 10 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037897][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:27.478734Z node 10 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037897][Partition][1][StateIdle] Try persist 2025-11-03T14:04:27.478753Z node 10 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:27.478755Z node 10 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:27.478757Z node 10 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][2][StateIdle] Try persist 2025-11-03T14:04:27.514911Z node 10 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:27.514928Z node 10 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:27.514931Z node 10 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist 2025-11-03T14:04:27.574707Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-11-03T14:04:27.578702Z node 10 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037897][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:27.578718Z node 10 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037897][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:27.578721Z node 10 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037897][Partition][1][StateIdle] Try persist 2025-11-03T14:04:27.578746Z node 10 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:27.578748Z node 10 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:27.578750Z node 10 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][2][StateIdle] Try persist 2025-11-03T14:04:27.578795Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-11-03T14:04:27.618849Z node 10 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037892][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:27.618867Z node 10 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037892][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:27.618869Z node 10 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037892][Partition][0][StateIdle] Try persist >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] >> Viewer::JsonStorageListingV2 [GOOD] >> 
Viewer::JsonStorageListingV2GroupIdFilter >> test.py::test[window-win_func_part_by_expr_new-default.txt-Results] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] >> test.py::test[aggregate-group_by_mul_gs_expr_and_column--Results] [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:10.631440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:10.631461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.631466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:10.631473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:10.631477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:10.631481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:10.631488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:10.631500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:10.631589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:10.631640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:10.643691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:10.643710Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:10.646342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:10.646400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:10.646436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:10.648153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:10.648223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:10.648298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.648490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:10.648912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.648951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:10.649123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.649130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:10.649150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:10.649155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:10.649159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:10.649176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.650232Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:10.665070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:10.665145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.665192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:10.665199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:10.665241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-11-03T14:04:10.665252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:10.665934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.665976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:10.666017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.666024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:10.666028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:10.666032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:10.666377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.666386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:10.666392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:10.666754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.666764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:10.666771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.666786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:10.667270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:10.667651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:10.667680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 
1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:10.667850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:10.667868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:10.667873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.667916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:10.667921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:10.667950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:10.667959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:10.668353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:10.668360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
37968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [12:128:2152] sender: [12:245:2058] recipient: [12:15:2062] 2025-11-03T14:04:28.868778Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:28.868855Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:28.868910Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:28.868919Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:28.868963Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:28.868974Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:28.869493Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:28.869531Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:28.869574Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:28.869587Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:28.869592Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:28.869598Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:28.869996Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:28.870007Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:28.870013Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:28.870336Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-11-03T14:04:28.870348Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:28.870354Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:28.870361Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:28.870391Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:28.870743Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:28.870781Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:28.870988Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:28.871012Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 51539609713 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:28.871019Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:28.871085Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:28.871094Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:28.871127Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:28.871144Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:28.871555Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:28.871566Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2025-11-03T14:04:28.871608Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:28.871614Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [12:212:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:28.871684Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:28.871692Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-11-03T14:04:28.871707Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-11-03T14:04:28.871712Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:28.871717Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-11-03T14:04:28.871721Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:28.871727Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-11-03T14:04:28.871732Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:28.871737Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-11-03T14:04:28.871742Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-11-03T14:04:28.871754Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:04:28.871761Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-11-03T14:04:28.871765Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-11-03T14:04:28.871871Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-11-03T14:04:28.871884Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-11-03T14:04:28.871889Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-11-03T14:04:28.871897Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-11-03T14:04:28.871902Z node 12 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:28.871915Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-11-03T14:04:28.872493Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-11-03T14:04:28.872582Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:28.873336Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [12:275:2264] Bootstrap 2025-11-03T14:04:28.873566Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [12:275:2264] Become StateWork (SchemeCache [12:281:2270]) 2025-11-03T14:04:28.873675Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [12:275:2264] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:28.874030Z node 12 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 |56.7%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |56.7%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |56.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: 2025-11-03T14:03:35.570089Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:03:35.605583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:03:35.608061Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:314:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:03:35.608161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:03:35.608205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001d77/r3tmp/tmpnhpjil/pdisk_1.dat 2025-11-03T14:03:35.682120Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:03:35.682163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:03:35.695222Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:03:35.696345Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178615159469 != 1762178615159473 2025-11-03T14:03:35.727566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:03:35.775502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:03:35.808085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:03:35.891197Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:67:2114] Handle TEvProposeTransaction 2025-11-03T14:03:35.891220Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:67:2114] TxId# 281474976715657 ProcessProposeTransaction 2025-11-03T14:03:35.891243Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:67:2114] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:650:2545] 2025-11-03T14:03:35.904298Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1676: Actor# [1:650:2545] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-11-03T14:03:35.904339Z node 1 :TX_PROXY DEBUG: schemereq.cpp:616: Actor# [1:650:2545] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-11-03T14:03:35.904538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1741: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-11-03T14:03:35.904551Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1731: Actor# [1:650:2545] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-11-03T14:03:35.904617Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1564: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-11-03T14:03:35.904647Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1611: Actor# [1:650:2545] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-11-03T14:03:35.904660Z node 1 :TX_PROXY DEBUG: schemereq.cpp:103: Actor# [1:650:2545] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-11-03T14:03:35.904727Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1466: Actor# [1:650:2545] txid# 281474976715657 HANDLE EvClientConnected 2025-11-03T14:03:35.905115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:03:35.905351Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1488: Actor# [1:650:2545] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-11-03T14:03:35.905360Z node 1 :TX_PROXY DEBUG: schemereq.cpp:596: Actor# [1:650:2545] txid# 281474976715657 SEND to# [1:590:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-11-03T14:03:35.919413Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828672, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvBoot 2025-11-03T14:03:35.919616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3123: StateInit, received event# 268828673, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvRestored 2025-11-03T14:03:35.919672Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:674:2566] 2025-11-03T14:03:35.919736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2025-11-03T14:03:35.926793Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3136: StateInactive, received event# 268828684, Sender [1:666:2560], Recipient [1:674:2566]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:03:35.926976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2025-11-03T14:03:35.927004Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2025-11-03T14:03:35.927134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-11-03T14:03:35.927152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2025-11-03T14:03:35.927157Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2025-11-03T14:03:35.927216Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2025-11-03T14:03:35.927236Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2025-11-03T14:03:35.927248Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 
persisting started state actor id [1:690:2566] in generation 1 2025-11-03T14:03:35.937620Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2025-11-03T14:03:35.948637Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2025-11-03T14:03:35.948733Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-11-03T14:03:35.948763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:692:2576] 2025-11-03T14:03:35.948770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2025-11-03T14:03:35.948776Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-11-03T14:03:35.948783Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:03:35.948858Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435072, Sender [1:674:2566], Recipient [1:674:2566]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:35.948868Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3182: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-11-03T14:03:35.948988Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2025-11-03T14:03:35.949018Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-11-03T14:03:35.949132Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2025-11-03T14:03:35.949144Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-11-03T14:03:35.949153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2025-11-03T14:03:35.949160Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-11-03T14:03:35.949165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2025-11-03T14:03:35.949172Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2025-11-03T14:03:35.949179Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2025-11-03T14:03:35.949200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [1:681:2570], Recipient [1:674:2566]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:35.949207Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:03:35.949216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:673:2565], serverId# [1:681:2570], sessionId# [0:0:0] 2025-11-03T14:03:35.949234Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [1:398:2397], Recipient [1:681:2570] 2025-11-03T14:03:35.949240Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:03:35.949264Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:03:35.949334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-11-03T14:03:35.949349Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-11-03T14:03:35.949371Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-11-03T14:03:35.949395Z node ... locks: Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback, immediate: 1 2025-11-03T14:04:29.039581Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:1761: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-11-03T14:04:29.039592Z node 13 :KQP_EXECUTER INFO: kqp_data_executer.cpp:2681: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. Total tasks: 0, readonly: 1, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks: 0, useFollowers: 0 2025-11-03T14:04:29.039600Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:142: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-11-03T14:04:29.039608Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:163: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-11-03T14:04:29.039614Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:2288: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. 
ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-11-03T14:04:29.039675Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269549568, Sender [13:995:2685], Recipient [13:964:2769]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 995 RawX2: 55834577533 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-11-03T14:04:29.039683Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3163: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-11-03T14:04:29.039716Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 2146435074, Sender [13:964:2769], Recipient [13:964:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-11-03T14:04:29.039721Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3184: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-11-03T14:04:29.039740Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2025-11-03T14:04:29.039779Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-11-03T14:04:29.039799Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-11-03T14:04:29.039811Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-11-03T14:04:29.039816Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-11-03T14:04:29.039822Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-11-03T14:04:29.039827Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-11-03T14:04:29.039837Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v300/18446744073709551615 ImmediateWriteEdge# v300/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-11-03T14:04:29.039849Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715665] at 72075186224037888 2025-11-03T14:04:29.039855Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-11-03T14:04:29.039860Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-11-03T14:04:29.039864Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715665] at 72075186224037888 to execution unit BlockFailPoint 2025-11-03T14:04:29.039868Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715665] at 72075186224037888 on unit BlockFailPoint 2025-11-03T14:04:29.039873Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1878: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-11-03T14:04:29.039877Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BlockFailPoint 2025-11-03T14:04:29.039884Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-11-03T14:04:29.039888Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-11-03T14:04:29.039906Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:263: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-11-03T14:04:29.039920Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-11-03T14:04:29.039939Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:510: add locks to result: 0 2025-11-03T14:04:29.039954Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-11-03T14:04:29.039959Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-11-03T14:04:29.039963Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-11-03T14:04:29.039967Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-11-03T14:04:29.039979Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-11-03T14:04:29.039998Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-11-03T14:04:29.040003Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-11-03T14:04:29.040007Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1932: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-11-03T14:04:29.040011Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1843: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-11-03T14:04:29.040026Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1878: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-11-03T14:04:29.040030Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1926: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-11-03T14:04:29.040035Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1938: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-11-03T14:04:29.040051Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2025-11-03T14:04:29.040057Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1949: Complete execution for 
[0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-11-03T14:04:29.040066Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-11-03T14:04:29.040108Z node 13 :KQP_EXECUTER DEBUG: kqp_data_executer.cpp:1417: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-11-03T14:04:29.040129Z node 13 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:1215: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. terminate execution. 2025-11-03T14:04:29.040143Z node 13 :KQP_EXECUTER DEBUG: kqp_executer_impl.h:901: ActorId: [13:995:2685] TxId: 281474976715665. Ctx: { TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Database: , SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, PoolId: default, DatabaseId: /Root}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-11-03T14:04:29.040161Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2943: SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, ActorId: [13:851:2685], ActorState: CleanupState, TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, EndCleanup, isFinal: 0 2025-11-03T14:04:29.040217Z node 13 :KQP_SESSION DEBUG: kqp_session_actor.cpp:2675: SessionId: ydb://session/3?node_id=13&id=NjI5MmEzNzQtMjExN2FkNjctOGZjOTNmMzEtM2ZlNGEwMTY=, ActorId: [13:851:2685], ActorState: CleanupState, TraceId: 01k950h6ez2zm2ry6dxv3wt4e8, Sent query response back to proxy, proxyRequestId: 8, proxyId: [13:65:2112] 2025-11-03T14:04:29.326501Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269877761, Sender [13:1004:2795], Recipient [13:964:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:29.326536Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:29.326549Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [13:1003:2794], serverId# [13:1004:2795], sessionId# [0:0:0] 2025-11-03T14:04:29.326583Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, received event# 269553224, Sender [13:590:2518], Recipient [13:964:2769]: NKikimr::TEvDataShard::TEvGetOpenTxs >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: Trying to start YDB, gRPC: 14291, MsgBus: 12734 2025-11-03T14:04:12.280508Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499682273160876:2140];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:12.280589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/ud3d/003170/r3tmp/tmpQhM03j/pdisk_1.dat 2025-11-03T14:04:12.340476Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:12.340535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:12.340673Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499682273160766:2081] 1762178652279611 != 1762178652279614 TServer::EnableGrpc on GrpcPort 14291, node 1 2025-11-03T14:04:12.349147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:12.349164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:12.349166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:12.349205Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:12734 2025-11-03T14:04:12.385395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:12.385433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:12.391815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-11-03T14:04:12.424067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:12.464795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:12.490453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.517673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.531165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.603255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:12.729029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499682273162419:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.729054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.729215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499682273162429:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.729227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.820958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.832392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.844864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.858423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.872309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.887924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.901006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.915768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.935373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499682273163291:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.935433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.935536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499682273163296:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.935551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499682273163297:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.935574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.936642Z node 1 :FLAT_TX_SCHEMESHARD ... reateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:27.743708Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:27.751208Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:27.766872Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:28.108595Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499749296203936:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:28.108620Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:28.108809Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499749296203968:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:28.108815Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499749296203969:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:28.108833Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:28.109509Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:28.111829Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7568499749296203972:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:28.202739Z node 12 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [12:7568499749296204023:2341] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; Trying to start YDB, gRPC: 4141, MsgBus: 25230 2025-11-03T14:04:28.803856Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:28.804470Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7568499749557785726:2276];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:28.804487Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003170/r3tmp/tmpP0WdMF/pdisk_1.dat 2025-11-03T14:04:28.842032Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:28.842071Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:28.853204Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:28.855007Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:28.858710Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [13:7568499749557785464:2081] 1762178668789509 != 1762178668789512 TServer::EnableGrpc on GrpcPort 4141, node 13 2025-11-03T14:04:28.863570Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:28.863584Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:28.863586Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:28.863641Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:25230 2025-11-03T14:04:28.880204Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:25230 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:28.999817Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:29.005501Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:29.464294Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499753852753405:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:29.464325Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:29.464599Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499753852753440:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:29.464611Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499753852753441:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:29.464636Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:29.465759Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:29.469492Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-11-03T14:04:29.469619Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7568499753852753444:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:29.539736Z node 13 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [13:7568499753852753495:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; |56.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |56.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred >> test_sql_negative.py::test[watermarks-bad_column-default.txt] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] |56.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |56.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> CompDefrag::DelayedCompaction [GOOD] >> CompDefrag::ZeroThresholdDefragWithCompaction >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] |56.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |56.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-11-03T14:04:13.114800Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499686929265801:2154];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:13.114845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:13.121440Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:13.121965Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499687914151560:2086];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:13.122071Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/0025b7/r3tmp/tmpbv28G4/pdisk_1.dat 2025-11-03T14:04:13.124379Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:13.154947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:13.166560Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:13.166833Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12592, node 1 2025-11-03T14:04:13.199570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/0025b7/r3tmp/yandexypUicH.tmp 2025-11-03T14:04:13.199587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/0025b7/r3tmp/yandexypUicH.tmp 2025-11-03T14:04:13.199653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/0025b7/r3tmp/yandexypUicH.tmp 2025-11-03T14:04:13.199711Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:13.205048Z INFO: TTestServer started on Port 9324 GrpcPort 12592 2025-11-03T14:04:13.217026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:13.217057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:9324 2025-11-03T14:04:13.219760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected PQClient connected to localhost:12592 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:04:13.226846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:13.226883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:13.231043Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:13.234283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:13.234569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 2025-11-03T14:04:13.272447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... waiting... 2025-11-03T14:04:13.348274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:13.566093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499686929266710:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.566120Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.566246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499686929266719:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.566257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.566581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499686929266724:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.567532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:13.586820Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499686929266726:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720662 completed, doublechecking } 2025-11-03T14:04:13.635606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.659395Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499686929266856:2750] txid# 281474976720664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:13.702174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.704589Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7568499686929266893:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:13.704852Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=1&id=MTcyYWM3LThlNGZmNmFlLTYxZmIxNDZmLWQ0Njg3MTUx, ActorId: [1:7568499686929266691:2323], ActorState: ExecuteState, TraceId: 01k950gqbya32385mpj4baqnbz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:04:13.705340Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-11-03T14:04:13.741012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-11-03T14:04:13.789497Z node 1 :KQP_EXECUTER ERROR: kqp_planner.cpp:123: TxId: 281474976720667. Ctx: { TraceId: 01k950gqj51y2ateytpdd9gvsw, Database: , SessionId: ydb://session/3?node_id=1&id=OWRhMzVkYjItYThkOWY ... 
72075186224037900][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.747100Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037900][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.755146Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.755160Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.755163Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.755226Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.755230Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.755232Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.755268Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.755271Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.755273Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist 2025-11-03T14:04:30.815458Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037903][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.815471Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037903][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.815473Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037903][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.815488Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037903][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.815491Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037903][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.815494Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037903][Partition][4][StateIdle] Try persist 2025-11-03T14:04:30.815504Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037904][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.815545Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037904][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.815548Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037904][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.837591Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7568499714964531468:2142], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown 
Kind: KindUnknown DomainInfo }] } 2025-11-03T14:04:30.837672Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [3:7568499714964531468:2142], cacheItem# { Subscriber: { Subscriber: [3:7568499719259500303:3055] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-11-03T14:04:30.837703Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [3:7568499757914208965:4987], recipient# [3:7568499757914208964:2815], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-11-03T14:04:30.844178Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037899][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.844195Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037899][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.844198Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037899][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.847234Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037899][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.847249Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037899][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.847252Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037899][Partition][4][StateIdle] Try persist 2025-11-03T14:04:30.847276Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037900][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.847277Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037900][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.847279Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037900][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.856479Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.856498Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.856501Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.856526Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.856529Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 
2025-11-03T14:04:30.856531Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.856539Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.856542Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.856544Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist 2025-11-03T14:04:30.915858Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037903][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.915884Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037903][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.915882Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037904][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.915887Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037903][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.915895Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037904][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.915898Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037904][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.915908Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037903][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.915910Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037903][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.915912Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037903][Partition][4][StateIdle] Try persist 2025-11-03T14:04:30.944451Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037899][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.944470Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037899][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.944473Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037899][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.947412Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037899][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.947427Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037899][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.947431Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037899][Partition][4][StateIdle] Try persist 2025-11-03T14:04:30.947454Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037900][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.947456Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037900][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.947458Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037900][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.956030Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: 
[72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.956050Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.956052Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:30.956071Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.956074Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.956075Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:30.956080Z node 3 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:30.956082Z node 3 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:30.956084Z node 3 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test |56.8%| [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part5/pytest >> test.py::test[bigdate-table_io-default.txt-Results] [GOOD] >> EvWrite::WriteInTransaction ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] Test command err: 2025-11-03T14:04:16.488836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:16.490893Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499700579906414:2253];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:16.491059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:16.536702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:16.536742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:16.538195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:16.539490Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:16.539795Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie 
mismatch for subscription [1:7568499700579906196:2081] 1762178656483692 != 1762178656483695 TServer::EnableGrpc on GrpcPort 30759, node 1 2025-11-03T14:04:16.558879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:16.558890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:16.558891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:16.558936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5243 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-11-03T14:04:16.627153Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:16.632242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... waiting... 
2025-11-03T14:04:16.641207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:16.641822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2025-11-03T14:04:16.648392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:16.652277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:16.993386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499700579906930:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.993429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499700579906921:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.993456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.993642Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499700579906936:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.993662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:16.994444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:16.997766Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499700579906935:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-11-03T14:04:17.060357Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499704874874284:2370] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:17.522301Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499703807549701:2068];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:17.522414Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:17.534524Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:17.558591Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:17.562496Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:7568499703807549673:2081] 1762178657521893 != 1762178657521896 TServer::EnableGrpc on GrpcPort 24543, node 2 2025-11-03T14:04:17.580877Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:17.580890Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:17.580893Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:17.580946Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:17.590267Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:17058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:17.599762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:17.602978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txI ... esConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-11-03T14:04:22.764566Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-11-03T14:04:22.864979Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 26022, node 5 TClient is connected to server localhost:9027 2025-11-03T14:04:22.899676Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:22.899697Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:22.899703Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:22.899788Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"10000000010000110111111100100111","FieldsRequired":"00000000000000000000000000100101","Problems":["no-database-board-info"],"Nodes":[{"NodeId":6,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[135.02294921875,67.32568359375,27.7470703125],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-shzehrlvom.auto.internal","Version":".1dcac8e","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-11-03T14:04:24.414765Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:24.416427Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:24.416949Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:542:2419], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:24.416989Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:24.417015Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:24.566768Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:24.642136Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:24.642185Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:24.668053Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:24.739429Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-11-03T14:04:24.759027Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-11-03T14:04:24.986223Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 17680, node 7 TClient is connected to server localhost:16623 2025-11-03T14:04:25.040708Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:25.040730Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:25.040736Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:25.040846Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"10000000010000110111111100000111","FieldsRequired":"00000000000000000000000000000101","Nodes":[{"NodeId":9,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[135.02294921875,67.32568359375,27.7470703125],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-shzehrlvom.auto.internal","Version":".1dcac8e","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-11-03T14:04:27.109747Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:27.111339Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:27.111850Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:541:2419], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:27.111875Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:27.111884Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:27.327496Z node 10 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:27.387048Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:27.387094Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:27.427223Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:27.490595Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-11-03T14:04:27.526420Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-11-03T14:04:27.723125Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 2453, node 10 TClient is connected to server localhost:24757 2025-11-03T14:04:27.814130Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:27.814150Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:27.814155Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:27.814257Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"1","FoundNodes":"1","FieldsAvailable":"10000000010000110111111100000111","FieldsRequired":"00000000000000000000000000000101","Nodes":[{"NodeId":11,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[138.224609375,69.11328125,28.5380859375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-shzehrlvom.auto.internal","Version":".1dcac8e","Location":{"DataCenter":"2","Module":"2","Rack":"2","Unit":"2"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}}]} 2025-11-03T14:04:30.017047Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:30.019450Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:623:2420], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:30.019494Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:30.019506Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:30.256951Z node 13 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:30.307505Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:30.307556Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:30.361138Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:30.461118Z node 13 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-11-03T14:04:30.482873Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-11-03T14:04:30.705966Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 13688, node 13 TClient is connected to server localhost:5170 2025-11-03T14:04:30.798548Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:30.798571Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:30.798576Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:30.798781Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"Version":18,"TotalNodes":"2","FoundNodes":"2","FieldsAvailable":"10000000010100110111111100100111","FieldsRequired":"00000000000000000000000000100101","Nodes":[{"NodeId":15,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[138.224609375,69.11328125,28.5380859375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-shzehrlvom.auto.internal","Version":".1dcac8e","Location":{"DataCenter":"3","Module":"3","Rack":"3","Unit":"3"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64}},{"NodeId":16,"Disconnected":true,"SystemState":{"StartTime":"0","ChangeTime":"1","LoadAverage":[138.224609375,69.11328125,28.5380859375],"NumberOfCpus":64,"SystemState":"Green","Host":"ghrun-shzehrlvom.auto.internal","Version":".1dcac8e","Location":{"DataCenter":"4","Module":"4","Rack":"4","Unit":"4"},"CoresUsed":0,"CoresTotal":0,"RealNumberOfCpus":64},"Tablets":[{"Type":"DataShard","State":"Green","Count":1}]}]} >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] >> 
test_sql_streaming.py::test[hop-GroupByHop-default.txt] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] >> TopicDescribe::BasicStartOffset |56.8%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::WriteReadModifications >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [GOOD] >> TStreamingQueryTest::DropStreamingQueryTwice |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |56.8%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |56.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> TStreamingQueryTest::DropStreamingQueryTwice [GOOD] >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition >> Viewer::TabletMerging [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] [GOOD] >> Viewer::CommitOffsetTest [GOOD] >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> EvWrite::WriteWithLock >> TStreamingQueryTest::ParallelAlterStreamingQuery >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] >> TStreamingQueryTest::ParallelCreateStreamingQuery >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] >> Viewer::TenantInfo5kkTablets [GOOD] >> 
test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [GOOD] |56.8%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 8088, MsgBus: 29496 2025-11-03T14:04:12.189668Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499683236624456:2066];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:12.189684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003055/r3tmp/tmp3NNIOl/pdisk_1.dat 2025-11-03T14:04:12.242451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:12.242542Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:12.242780Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499683236624430:2081] 1762178652189417 != 1762178652189420 TServer::EnableGrpc on GrpcPort 8088, node 1 2025-11-03T14:04:12.252628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:12.252643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:12.252645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:12.252684Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29496 TClient is connected to server localhost:29496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: 2025-11-03T14:04:12.293658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:12.293689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:12.295028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:12.309103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:12.326185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... 2025-11-03T14:04:12.345628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.363767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.376707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:12.399124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:12.620186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499683236626076:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.620219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.620452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499683236626086:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.620461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.677519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.687528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.698381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.712778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.726442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.741337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.756034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.769542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:12.788545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499683236626947:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:12.788580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:12.788719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499683236626952:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:12.788730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499683236626953:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:12.788747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.789773Z node 1 :FLAT_TX_SCHEMESHARD W ... , broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:34.104341Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:34.104344Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:34.104396Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:15470 TClient is connected to server localhost:15470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:34.171697Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:34.179819Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... 2025-11-03T14:04:34.198780Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:34.250765Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.276243Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:34.356609Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:34.602017Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499777991990035:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.602068Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.602290Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499777991990045:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.602299Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:34.619595Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.644513Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.659277Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.673459Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.688318Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.702332Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.717953Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.734893Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:34.783324Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[12:7568499777991990906:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.783369Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.783622Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499777991990911:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.783636Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7568499777991990912:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.783663Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:34.785029Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179)
2025-11-03T14:04:34.791074Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7568499777991990915:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:04:34.880916Z node 12 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [12:7568499777991990967:3555] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:35.071431Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:35.251010Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1762178675292, txId: 281474976710673] shutting down 2025-11-03T14:04:35.332154Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1762178675362, txId: 281474976710675] shutting down >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:36.301235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:36.301260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:36.301268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:36.301277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:36.301283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:36.301288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:36.301299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:36.301325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:36.301452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:36.301538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:36.318299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:36.318331Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:36.322214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:36.322303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:36.322343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:36.325632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:36.325787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:36.325924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:36.326243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:36.327227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:36.327314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:36.327611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:36.327623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:36.327651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:36.327660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:36.327666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:36.327702Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:36.329653Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:36.356319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:36.356420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:36.356495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:36.356504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:36.356580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:36.356599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:36.358346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:36.358412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:36.358481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:36.358491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:36.358496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:36.358501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:36.359181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:36.359198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:36.359208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:36.359705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:36.359718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:36.359725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-11-03T14:04:36.359730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:36.360417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:36.360780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:36.360810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:36.360996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:36.361024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:36.361041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:36.361098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:36.361108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:36.361138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:36.361152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:36.361614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:36.361624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
2057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 69us result status StatusSuccess 2025-11-03T14:04:36.426453Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:36.426570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:36.426585Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 32us result status StatusSuccess 2025-11-03T14:04:36.426644Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:36.426734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:36.426751Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 20us result status StatusSuccess 2025-11-03T14:04:36.426849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "MyStreamingQuery1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyStreamingQuery2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 
72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:36.426920Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:36.426938Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 20us result status StatusSuccess 2025-11-03T14:04:36.426988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:36.427038Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:36.427053Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 18us result status StatusSuccess 2025-11-03T14:04:36.427105Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:34.351484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:34.351513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:34.351519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:34.351528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:34.351536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:34.351541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:34.351551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:34.351567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:34.351688Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:34.351763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:34.367863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:34.367890Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:34.371484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:34.371567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:34.371607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:34.373031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:34.373120Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:34.373220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.373398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:34.373850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:34.373895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:34.374109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:34.374117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:34.374137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:34.374143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:34.374148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:34.374173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.375388Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:34.405817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-11-03T14:04:34.405914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.405990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:34.406000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:34.406079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:34.406097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:34.412311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.412386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:34.412477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.412494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:34.412501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:34.412508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:34.414321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.414342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:34.414349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:34.415081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.415101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.415110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.415120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:34.416035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:34.416692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:34.416760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:34.417021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.417059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:34.417080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.417153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:34.417162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.417202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:34.417214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:34.420865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:34.420882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
X_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.664217Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:34.664226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:34.664530Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.664541Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.664547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.664553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:34.664585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:34.664870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:34.664906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:34.665106Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.665132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936753 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:34.665140Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.665206Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:34.665214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.665246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:34.665259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:34.665694Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:34.665705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:34.665748Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:34.665757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-11-03T14:04:34.665818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.665826Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-11-03T14:04:34.665840Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-11-03T14:04:34.665846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:34.665851Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-11-03T14:04:34.665855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:34.665861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-11-03T14:04:34.665867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:34.665873Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-11-03T14:04:34.665878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-11-03T14:04:34.665891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:04:34.665898Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-11-03T14:04:34.665903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-11-03T14:04:34.666013Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-11-03T14:04:34.666026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-11-03T14:04:34.666031Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-11-03T14:04:34.666037Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-11-03T14:04:34.666041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:34.666056Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-11-03T14:04:34.666618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-11-03T14:04:34.666725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-11-03T14:04:34.667600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropStreamingQuery Drop { Name: "MyStreamingQuery" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:34.667637Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_streaming_query.cpp:182: [72057594046678944] TDropStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2025-11-03T14:04:34.667660Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp:103, at schemeshard: 72057594046678944 2025-11-03T14:04:34.667734Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:274:2264] Bootstrap 2025-11-03T14:04:34.667954Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:274:2264] Become StateWork (SchemeCache [2:279:2269]) 2025-11-03T14:04:34.668130Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:274:2264] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:34.668591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp:103" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:34.668636Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, 
nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), source_location: ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp:103, operation: DROP STREAMING QUERY, path: MyStreamingQuery 2025-11-03T14:04:34.668743Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-11-03T14:04:34.668793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-11-03T14:04:34.668801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-11-03T14:04:34.668854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-11-03T14:04:34.668872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-11-03T14:04:34.668877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:289:2279] TestWaitNotification: OK eventTxId 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:35.826488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:35.826516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:35.826524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:35.826534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:35.826542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:35.826546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:35.826555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:35.826584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:35.826734Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:35.826805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:35.848315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:35.848340Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:35.852266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:35.852355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:35.852398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:35.854078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:35.854180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:35.854294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:35.854478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:35.855238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:35.855298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:35.855621Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:35.855640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:35.855672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:35.855683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:35.855691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:35.855729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.863398Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:35.884696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2025-11-03T14:04:35.884791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.884869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:35.884879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:35.884961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:35.884981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:35.885914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:35.885976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:35.886054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.886067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:35.886074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:35.886081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:35.886539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.886554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:35.886576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:35.886984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.886999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.887006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:35.887014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:35.887788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:35.888651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:35.888717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:35.888964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:35.888998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:35.889017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:35.889083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:35.889093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:35.889131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:35.889147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:35.889607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:35.889619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
r for txId 113: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:391:2381] 
2025-11-03T14:04:35.937306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2025-11-03T14:04:35.937470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber 
for txId 128: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:391:2381] 2025-11-03T14:04:35.937599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-11-03T14:04:35.937603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:391:2381] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2025-11-03T14:04:35.938130Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:35.938183Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 73us result status StatusSuccess 2025-11-03T14:04:35.938296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } Properties { key: "run" value: "true" } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications Test command err: 2025-11-03T14:04:33.697189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:107;event=initialize_shard;step=OnActivateExecutor; 2025-11-03T14:04:33.702080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:126;event=initialize_shard;step=initialize_tiring_finished; 2025-11-03T14:04:33.702153Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-11-03T14:04:33.703003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:33.703061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:33.703105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:33.703126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:33.703146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:33.703165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:33.703186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:33.703213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:33.703232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:33.703251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:33.703271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:33.703291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:33.703310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:33.712308Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-11-03T14:04:33.712392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-11-03T14:04:33.712404Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-11-03T14:04:33.712444Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:04:33.712494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:33.712509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:33.712517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-11-03T14:04:33.712529Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-11-03T14:04:33.712560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:33.712569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:33.712574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-11-03T14:04:33.712606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:04:33.712618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:33.712627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:33.712632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-11-03T14:04:33.712645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-11-03T14:04:33.712654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:33.712664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:33.712671Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-11-03T14:04:33.712686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:33.712697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:33.712704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-11-03T14:04:33.712721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:33.712732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:33.712740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-11-03T14:04:33.712772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:33.712781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:33.712787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-11-03T14:04:33.712804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:33.712812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:33.712817Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-11-03T14:04:33.712825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:33.712834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:33.712839Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-11-03T14:04:33.712848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:33.712856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:33.712863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-11-03T14:04:33.712880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:33.712889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecPlain } } } TtlSettings { Version: 1 } } OwnerPath: "/Root/olap" } } 2025-11-03T14:04:33.954867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Complete;fline=columnshard__init.cpp:158;step=TTxUpdateSchema.Complete; 2025-11-03T14:04:33.954923Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxInit.Execute at tablet 9437184 2025-11-03T14:04:33.955386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=4; 2025-11-03T14:04:33.955447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:special_valuesLoadingTime=41; 2025-11-03T14:04:33.955484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:special_valuesLoadingTime=29; 2025-11-03T14:04:33.955524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tables_managerLoadingTime=32; 2025-11-03T14:04:33.955553Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=5; 2025-11-03T14:04:33.955564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=5; 2025-11-03T14:04:33.955575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=5; 2025-11-03T14:04:33.955587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2025-11-03T14:04:33.955600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=68; 2025-11-03T14:04:33.955616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=7; 2025-11-03T14:04:33.955632Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=9; 2025-11-03T14:04:33.955650Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=10; 2025-11-03T14:04:33.955665Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=6; 2025-11-03T14:04:33.955691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=17; 2025-11-03T14:04:33.955763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=63; 2025-11-03T14:04:33.955774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-11-03T14:04:33.955783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-11-03T14:04:33.955793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-11-03T14:04:33.955813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=9; 2025-11-03T14:04:33.955950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-11-03T14:04:33.955966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=8; 2025-11-03T14:04:33.955973Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-11-03T14:04:33.955986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-11-03T14:04:33.955996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=4; 2025-11-03T14:04:33.956004Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=2; 2025-11-03T14:04:33.956009Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=607; 2025-11-03T14:04:33.956047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-11-03T14:04:33.956058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-11-03T14:04:33.956071Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard_impl.cpp:1531;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-11-03T14:04:33.956176Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:04:33.956185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; 2025-11-03T14:04:33.956278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:265;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-11-03T14:04:33.956295Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:256;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-11-03T14:04:33.956301Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:04:33.956307Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; VERIFY failed (2025-11-03T14:04:33.956329Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0xF5BEFB6) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0xF5B5857) NKikimr::NTabletPipe::TClient::TClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+435 (0x1183EF03) NKikimr::NTabletPipe::CreateClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+50 (0x1183ED22) NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&)+284 (0x1AD9781C) NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&)+5290 (0x1AD8F3FA) NActors::IActor::Receive(TAutoPtr&)+216 (0xFD2C008) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1389 (0x1D0AE7ED) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+4931 (0x1D0AB843) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+473 (0x1D0AF9D9) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+67 (0x1D0BF663) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+102 (0x1D0BF426) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+24 (0x1D0B2228) NActors::TTestActorRuntimeBase::SimulateSleep(TDuration)+419 (0x1D0B2133) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long)+216 (0x1EFDED98) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, 
NKikimr::NColumnShard::TestTableDescription const&, TBasicString>, unsigned long)+816 (0x1EFDF150) NKikimr::NTestSuiteTColumnShardTestReadWrite::TTestCaseWriteReadModifications::Execute_(NUnitTest::TTestContext&)+1187 (0xF3D8C53) NKikimr::NTestSuiteTColumnShardTestReadWrite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xF426AF7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0xF75EDF8) NKikimr::NTestSuiteTColumnShardTestReadWrite::TCurrentTest::Execute()+481 (0xF426481) NUnitTest::TTestFactory::Execute()+817 (0xF75F541) NUnitTest::RunMain(int, char**)+3229 (0xF76DCDD) ??+0 (0x7F33D5DCAD90) __libc_start_main+128 (0x7F33D5DCAE40) _start+41 (0xE551029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot Test command err: 2025-11-03T14:04:33.775768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:107;event=initialize_shard;step=OnActivateExecutor; 2025-11-03T14:04:33.779267Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];fline=columnshard.cpp:126;event=initialize_shard;step=initialize_tiring_finished; 2025-11-03T14:04:33.779309Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-11-03T14:04:33.779858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:33.779893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:33.779922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:33.779937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:33.779949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:33.779966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:33.779979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:33.779992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:33.780007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:33.780020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:33.780033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:33.780046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:33.780061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:33.786250Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-11-03T14:04:33.786348Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-11-03T14:04:33.786359Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-11-03T14:04:33.786403Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:04:33.786459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:33.786475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:33.786481Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-11-03T14:04:33.786492Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-11-03T14:04:33.786502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:33.786511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:33.786515Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-11-03T14:04:33.786538Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:04:33.786548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:33.786556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:33.786561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-11-03T14:04:33.786572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-11-03T14:04:33.786595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:33.786604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:33.786608Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-11-03T14:04:33.786618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:33.786644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:33.786650Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-11-03T14:04:33.786660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:33.786670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:33.786675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-11-03T14:04:33.786721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:33.786732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:33.786737Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-11-03T14:04:33.786752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:33.786761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:33.786766Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-11-03T14:04:33.786774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:33.786782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:33.786787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-11-03T14:04:33.786802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:33.786811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:33.786817Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-11-03T14:04:33.786832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:33.786841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ype" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecPlain } } } TtlSettings { Version: 1 } } OwnerPath: "/Root/olap" } } 2025-11-03T14:04:34.041624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Complete;fline=columnshard__init.cpp:158;step=TTxUpdateSchema.Complete; 2025-11-03T14:04:34.041681Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxInit.Execute at tablet 9437184 2025-11-03T14:04:34.042078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=2; 2025-11-03T14:04:34.042127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:special_valuesLoadingTime=38; 2025-11-03T14:04:34.042156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:special_valuesLoadingTime=22; 2025-11-03T14:04:34.042191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tables_managerLoadingTime=28; 2025-11-03T14:04:34.042214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=4; 2025-11-03T14:04:34.042223Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2025-11-03T14:04:34.042268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-11-03T14:04:34.042278Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=5; 2025-11-03T14:04:34.042290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=94; 2025-11-03T14:04:34.042303Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=6; 2025-11-03T14:04:34.042316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=7; 2025-11-03T14:04:34.042331Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=9; 2025-11-03T14:04:34.042342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2025-11-03T14:04:34.042363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15; 2025-11-03T14:04:34.042408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=38; 2025-11-03T14:04:34.042419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=1; 2025-11-03T14:04:34.042426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-11-03T14:04:34.042433Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=1; 2025-11-03T14:04:34.042450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=8; 2025-11-03T14:04:34.042557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-11-03T14:04:34.042570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=6; 2025-11-03T14:04:34.042593Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-11-03T14:04:34.042604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=5; 2025-11-03T14:04:34.042613Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=2; 2025-11-03T14:04:34.042636Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=3; 2025-11-03T14:04:34.042643Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=556; 2025-11-03T14:04:34.042675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-11-03T14:04:34.042684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-11-03T14:04:34.042697Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];process=SwitchToWork;fline=columnshard_impl.cpp:1531;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-11-03T14:04:34.042805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:04:34.042813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; 2025-11-03T14:04:34.042900Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:265;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-11-03T14:04:34.042915Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:256;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-11-03T14:04:34.042921Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:04:34.042926Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:129:2160];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; VERIFY failed (2025-11-03T14:04:34.042947Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0xF5BEFB6) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0xF5B5857) NKikimr::NTabletPipe::TClient::TClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+435 (0x1183EF03) NKikimr::NTabletPipe::CreateClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+50 (0x1183ED22) NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&)+284 (0x1AD9781C) NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&)+5290 (0x1AD8F3FA) NActors::IActor::Receive(TAutoPtr&)+216 (0xFD2C008) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1389 (0x1D0AE7ED) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+4931 (0x1D0AB843) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+473 (0x1D0AF9D9) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+67 (0x1D0BF663) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+102 (0x1D0BF426) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+24 (0x1D0B2228) NActors::TTestActorRuntimeBase::SimulateSleep(TDuration)+419 (0x1D0B2133) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long)+216 (0x1EFDED98) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, 
NKikimr::NColumnShard::TestTableDescription const&, TBasicString>, unsigned long)+816 (0x1EFDF150) ??+0 (0xF3F78EF) NKikimr::NTestSuiteTColumnShardTestReadWrite::TestCompactionInGranule(bool, unsigned short)+496 (0xF3F74D0) NKikimr::NTestSuiteTColumnShardTestReadWrite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xF426AF7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0xF75EDF8) NKikimr::NTestSuiteTColumnShardTestReadWrite::TCurrentTest::Execute()+481 (0xF426481) NUnitTest::TTestFactory::Execute()+817 (0xF75F541) NUnitTest::RunMain(int, char**)+3229 (0xF76DCDD) ??+0 (0x7FFA97BE6D90) __libc_start_main+128 (0x7FFA97BE6E40) _start+41 (0xE551029) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:131:2058] recipient: [1:113:2144] 2025-11-03T14:04:34.856092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:34.856120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:34.856127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:34.856137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:34.856146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:34.856151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:34.856161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:34.856190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:34.856324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:34.856402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:34.881418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7735: Cannot subscribe to console configs 2025-11-03T14:04:34.881444Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:34.890197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:34.890313Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:34.890352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:34.904889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:34.905029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:34.905151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.905524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:34.906814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:34.906895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:34.907286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:34.907299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:34.907339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:34.907350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:34.907358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:34.907396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.909457Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:130:2155] sender: [1:245:2058] recipient: [1:15:2062] 2025-11-03T14:04:34.936335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:34.936428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.936500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:34.936510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 
2025-11-03T14:04:34.936584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:34.936603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:34.937567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.937627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:34.937702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.937714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:34.937722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:34.937728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:34.938185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.938198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:34.938204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:34.938563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.938592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:34.938599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.938607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:34.939369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:34.939801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 
cookie: 0:1 msg type: 269090816 2025-11-03T14:04:34.939864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:34.940099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:34.940131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 4294969455 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:34.940148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.940209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:34.940216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:34.940250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:34.940264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:34.940695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:34.940706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_ ... 
057594046678944 2025-11-03T14:04:35.178825Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:35.178869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.178879Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:35.178886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:35.178892Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:35.179269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.179280Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:35.179286Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:35.179591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.179601Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.179607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:35.179615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:35.179649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:35.179942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:35.179980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:35.180221Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:35.180244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 141 RawX2: 
8589936753 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:35.180254Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:35.180313Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 128 -> 240 2025-11-03T14:04:35.180341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:35.180373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:35.180385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-11-03T14:04:35.180781Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:35.180791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:35.180838Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:35.180844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:214:2214], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-11-03T14:04:35.180906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:35.180915Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2025-11-03T14:04:35.180928Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-11-03T14:04:35.180934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:35.180939Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:926: Part operation is done id#1:0 progress is 1/1 2025-11-03T14:04:35.180943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:35.180949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-11-03T14:04:35.180955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-11-03T14:04:35.180960Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:993: Operation and all the parts is done, operation id: 1:0 2025-11-03T14:04:35.180965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5557: RemoveTx for txid 1:0 2025-11-03T14:04:35.180977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-11-03T14:04:35.180984Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1002: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-11-03T14:04:35.180990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1009: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-11-03T14:04:35.181139Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-11-03T14:04:35.181153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-11-03T14:04:35.181159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-11-03T14:04:35.181165Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-11-03T14:04:35.181170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-11-03T14:04:35.181184Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-11-03T14:04:35.181701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-11-03T14:04:35.181793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-11-03T14:04:35.181913Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:277:2266] Bootstrap 2025-11-03T14:04:35.182129Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:277:2266] Become StateWork (SchemeCache [2:282:2271]) 2025-11-03T14:04:35.182957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:35.183001Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 101:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } 2025-11-03T14:04:35.183018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 101:0, path# /MyRoot/ 2025-11-03T14:04:35.183038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: 
ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp:102, at schemeshard: 72057594046678944 2025-11-03T14:04:35.183292Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:277:2266] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:35.190667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp:102" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-11-03T14:04:35.190744Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp:102, operation: CREATE STREAMING QUERY, path: 2025-11-03T14:04:35.190946Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::CommitOffsetTest [GOOD] Test command err: 2025-11-03T14:04:17.626937Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:17.626992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:17.662324Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:17.662494Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:17.662914Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:531:2418], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:17.663038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:17.663092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:17.663234Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:515:2183], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:17.663285Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:17.663300Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:17.752200Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:17.795680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:17.795723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:17.795937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:17.795953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:17.827881Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:17.828128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:17.828234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:17.887180Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-11-03T14:04:17.891197Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-11-03T14:04:18.006130Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 7568, node 1 TClient is connected to server localhost:1045 2025-11-03T14:04:18.050888Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:18.050906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:18.050909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:18.051027Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration Request timer = 0.003223990997 BASE_PERF = 1.733191263 2025-11-03T14:04:18.946680Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:18.948540Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7568499709371560834:2282];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:18.948562Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:18.972183Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:18.972216Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:18.973433Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:18.975554Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:7568499709371560566:2081] 1762178658939346 != 1762178658939349 2025-11-03T14:04:18.987244Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26607, node 3 2025-11-03T14:04:19.008197Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:19.012301Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:19.012312Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:19.012313Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:19.012376Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:26224 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-11-03T14:04:19.054879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:19.059296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:19.064637Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:19.065464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) waiting... 2025-11-03T14:04:19.066604Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-11-03T14:04:19.067303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:19.068073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2025-11-03T14:04:19.387165Z node 3 :TICKET_PARSER INFO: ut_utils.cpp:68: Ticket parser: got TEvAuthorizeTicket event: Bearer token /Root 1 2025-11-03T14:04:19.387200Z node 3 :TICKET_PARSER INFO: ut_utils.cpp:130: Send TEvAuthorizeTicketResult success 2025-11-03T14:04:19.588629Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499713666528584:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:19.588657Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:19.588770Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499713666528597:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:19.588783Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499713666528598:2332], DatabaseId: /Ro ... r2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19980 offsets: 39979 already_written: false write_statistics { persist_duration_ms: 4 } 2025-11-03T14:04:33.482959Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19980 2025-11-03T14:04:33.482976Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19981 offsets: 39980 already_written: false write_statistics { persist_duration_ms: 4 } 2025-11-03T14:04:33.482979Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19981 2025-11-03T14:04:33.482995Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19982 offsets: 39981 already_written: false write_statistics { persist_duration_ms: 4 } 2025-11-03T14:04:33.482998Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19982 2025-11-03T14:04:33.483014Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19983 offsets: 39982 already_written: false write_statistics { persist_duration_ms: 4 } 2025-11-03T14:04:33.483017Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19983 2025-11-03T14:04:33.483033Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19984 offsets: 39983 already_written: false write_statistics { persist_duration_ms: 4 } 2025-11-03T14:04:33.483036Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19984 2025-11-03T14:04:33.483072Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19985 offsets: 39984 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483080Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19985 2025-11-03T14:04:33.483097Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19986 offsets: 39985 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483102Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19986 2025-11-03T14:04:33.483118Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19987 offsets: 39986 already_written: false write_statistics { persist_duration_ms: 1 
queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483121Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19987 2025-11-03T14:04:33.483137Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19988 offsets: 39987 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483140Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19988 2025-11-03T14:04:33.483156Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19989 offsets: 39988 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483159Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19989 2025-11-03T14:04:33.483176Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19990 offsets: 39989 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483179Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19990 2025-11-03T14:04:33.483196Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19991 offsets: 39990 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483199Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19991 2025-11-03T14:04:33.483215Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19992 offsets: 39991 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483218Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19992 2025-11-03T14:04:33.483235Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19993 offsets: 39992 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483238Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19993 2025-11-03T14:04:33.483253Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19994 offsets: 39993 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483256Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19994 2025-11-03T14:04:33.483275Z :DEBUG: [] MessageGroupId [producer2] SessionId 
[producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19995 offsets: 39994 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483278Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19995 2025-11-03T14:04:33.483294Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19996 offsets: 39995 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483297Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19996 2025-11-03T14:04:33.483314Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19997 offsets: 39996 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483317Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19997 2025-11-03T14:04:33.483334Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19998 offsets: 39997 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483337Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19998 2025-11-03T14:04:33.483353Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 19999 offsets: 39998 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483355Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 19999 2025-11-03T14:04:33.483376Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session got write response: sequence_numbers: 20000 offsets: 39999 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 4 } 2025-11-03T14:04:33.483379Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: acknoledged message 20000 2025-11-03T14:04:33.514688Z :INFO: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session will now close 2025-11-03T14:04:33.514717Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: aborting 2025-11-03T14:04:33.514969Z :INFO: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: gracefully shut down, all writes complete 2025-11-03T14:04:33.514984Z :DEBUG: [] MessageGroupId [producer2] SessionId [producer2|358a60d2-db5961cf-688936fa-7a264ff_0] Write session: destroy 2025-11-03T14:04:34.082644Z node 6 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [6:7568499775037207811:2475], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:34.084783Z node 6 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=6&id=YjgwMzVjOTMtNDk2ZDQ0ZDItMTJkMWUxMjItNzBmMDA3NGQ=, ActorId: [6:7568499775037207809:2474], ActorState: ExecuteState, TraceId: 01k950hbcs3xh1mzaxg9v61rc6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:04:34.084932Z node 6 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-11-03T14:04:34.540691Z node 6 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [6:7568499775037207824:7170], for# username, access# DescribeSchema 2025-11-03T14:04:34.593334Z node 6 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:323: Access denied: self# [6:7568499775037207835:7173], for# username, access# DescribeSchema ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-11-03T14:04:06.717687Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499656828910756:2167];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:06.717717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:06.723121Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:06.723928Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499657408510851:2151];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:06.723982Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00257c/r3tmp/tmptz1MDw/pdisk_1.dat 2025-11-03T14:04:06.726233Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2025-11-03T14:04:06.751420Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:06.753939Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:06.765587Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2487, node 1 2025-11-03T14:04:06.787055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/ud3d/00257c/r3tmp/yandexOiJozy.tmp 2025-11-03T14:04:06.787076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/ud3d/00257c/r3tmp/yandexOiJozy.tmp 2025-11-03T14:04:06.788302Z INFO: TTestServer started on Port 27453 GrpcPort 2487 2025-11-03T14:04:06.803281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/ud3d/00257c/r3tmp/yandexOiJozy.tmp 2025-11-03T14:04:06.803891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:27453 PQClient connected to localhost:2487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:06.820434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:06.820473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:06.822253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:06.826971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:06.826989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:06.827002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:06.828313Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:06.828576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected waiting... waiting... 
2025-11-03T14:04:06.857169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... waiting... 2025-11-03T14:04:07.007544Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:07.042527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:07.196169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499661123878962:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.196208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499661123878938:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.196232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.196358Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499661123878966:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.196382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:07.197205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:07.203093Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499661123878965:2330], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-11-03T14:04:07.254928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.265331Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499661123879144:2790] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:07.271842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:07.286390Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7568499661123879165:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:07.286478Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=1&id=NDY3ZmFkZjMtNDNkZjcyMi1iMDY1ZTIxMi02NDE3NjAxZQ==, ActorId: [1:7568499661123878932:2323], ActorState: ExecuteState, TraceId: 01k950gh4sc9faky7e6qdnkkrg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:04:07.287039Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-11-03T14:04:07.315446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.l ... -11-03T14:04:36.131382Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037894] server disconnected, pipe [7:7568499784030610331:2514] destroyed 2025-11-03T14:04:36.131396Z node 8 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: some@random@consumer_7_1_6569667457003986912_v1 2025-11-03T14:04:36.131399Z node 8 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: some@random@consumer_7_1_6569667457003986912_v1 2025-11-03T14:04:36.133843Z :INFO: [/Root] TraceId [] SessionId [123|afa98a4e-eb865b03-6aedffbd-37dd638b_0] MessageGroupId [123] Write session: close. 
Timeout 0.000000s 2025-11-03T14:04:36.133853Z :INFO: [/Root] TraceId [] SessionId [123|afa98a4e-eb865b03-6aedffbd-37dd638b_0] MessageGroupId [123] Write session will now close 2025-11-03T14:04:36.133862Z :DEBUG: [/Root] TraceId [] SessionId [123|afa98a4e-eb865b03-6aedffbd-37dd638b_0] MessageGroupId [123] Write session: aborting 2025-11-03T14:04:36.133987Z :INFO: [/Root] TraceId [] SessionId [123|afa98a4e-eb865b03-6aedffbd-37dd638b_0] MessageGroupId [123] Write session: gracefully shut down, all writes complete 2025-11-03T14:04:36.133993Z :DEBUG: [/Root] TraceId [] SessionId [123|afa98a4e-eb865b03-6aedffbd-37dd638b_0] MessageGroupId [123] Write session: destroy 2025-11-03T14:04:36.134173Z node 7 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: 123|afa98a4e-eb865b03-6aedffbd-37dd638b_0 grpc read done: success: 0 data: 2025-11-03T14:04:36.134182Z node 7 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: 123|afa98a4e-eb865b03-6aedffbd-37dd638b_0 grpc read failed 2025-11-03T14:04:36.134189Z node 7 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: 123|afa98a4e-eb865b03-6aedffbd-37dd638b_0 grpc closed 2025-11-03T14:04:36.134196Z node 7 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: 123|afa98a4e-eb865b03-6aedffbd-37dd638b_0 is DEAD 2025-11-03T14:04:36.134438Z node 7 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-11-03T14:04:36.134503Z node 7 :PERSQUEUE DEBUG: pq_impl.cpp:2746: [PQ: 72075186224037896] server disconnected, pipe [7:7568499784030610308:2501] destroyed 2025-11-03T14:04:36.134519Z node 7 :PERSQUEUE DEBUG: partition_write.cpp:138: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2025-11-03T14:04:36.134524Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.134526Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.134527Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:36.144578Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.144594Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.144597Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:36.144615Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.144617Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.144619Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist 2025-11-03T14:04:36.144645Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.144647Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.144649Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:36.144624Z node 8 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037894][Partition][1][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.144638Z node 8 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037894][Partition][1][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.144641Z node 8 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037894][Partition][1][StateIdle] Try persist 2025-11-03T14:04:36.144654Z node 8 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037894][Partition][2][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.144657Z node 8 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037894][Partition][2][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.144659Z node 8 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037894][Partition][2][StateIdle] Try persist 2025-11-03T14:04:36.181964Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7568499756478394927:2095], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-11-03T14:04:36.182009Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [8:7568499756478394927:2095], cacheItem# { Subscriber: { 
Subscriber: [8:7568499760773362380:2163] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-11-03T14:04:36.182033Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [8:7568499786543166521:2408], recipient# [8:7568499786543166520:2327], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-11-03T14:04:36.247518Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.247535Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.247539Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:36.247560Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.247567Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.247569Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist 2025-11-03T14:04:36.247584Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.247586Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.247590Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist 2025-11-03T14:04:36.307733Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2806: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7568499753965836577:2130], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-11-03T14:04:36.307787Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1888: FillEntry for TNavigate: self# [7:7568499753965836577:2130], cacheItem# { Subscriber: { Subscriber: [7:7568499758260804671:2665] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-11-03T14:04:36.307821Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:267: Send result: self# [7:7568499784030610369:4043], recipient# [7:7568499784030610368:2528], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-11-03T14:04:36.348872Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][3][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.348887Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][3][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.348891Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][3][StateIdle] Try persist 2025-11-03T14:04:36.348910Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037895][Partition][4][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.348913Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037895][Partition][4][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.348916Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037895][Partition][4][StateIdle] Try persist 2025-11-03T14:04:36.348931Z node 7 :PERSQUEUE DEBUG: partition.cpp:2289: [72075186224037896][Partition][0][StateIdle] Batching state before ContinueProcessTxsAndUserActs: 0 2025-11-03T14:04:36.348934Z node 7 :PERSQUEUE DEBUG: partition.cpp:2298: [72075186224037896][Partition][0][StateIdle] Batching state after ContinueProcessTxsAndUserActs: 1 2025-11-03T14:04:36.348939Z node 7 :PERSQUEUE DEBUG: partition.cpp:2321: [72075186224037896][Partition][0][StateIdle] Try persist ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 29090, MsgBus: 20662 2025-11-03T14:04:12.512011Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499680937950117:2078];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:12.512396Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:12.516866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003049/r3tmp/tmpJvrOtG/pdisk_1.dat 2025-11-03T14:04:12.560114Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29090, node 1 2025-11-03T14:04:12.574093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-11-03T14:04:12.574104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:12.574105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:12.574141Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20662 2025-11-03T14:04:12.615827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:12.615858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:12.617764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20662 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-11-03T14:04:12.652289Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:12.663069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:12.666850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:12.721181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:12.746918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.774482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.807806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.943215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499680937951712:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.943248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.943331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499680937951722:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.943343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.994010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.002776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.012839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.026576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.041236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.055345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.069049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.083080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.099980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499685232919880:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.100009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499685232919885:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.100018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.100098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499685232919888:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.100118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.100833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:1 ... le config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:32.710180Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:32.710182Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:32.710238Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:13278 TClient is connected to server localhost:13278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-11-03T14:04:32.855095Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:32.857115Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:32.863644Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:32.903837Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:32.931554Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:32.947805Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:32.988027Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:33.167081Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499771130226050:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.167109Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.167308Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499771130226060:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.167319Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.180572Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.192740Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.210706Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.222416Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.238767Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.254161Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.268066Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.285370Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:33.315649Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[13:7568499771130226921:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.315687Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499771130226926:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.315701Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.315882Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7568499771130226929:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.315902Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:33.316766Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:33.321291Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7568499771130226928:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:04:33.396670Z node 13 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [13:7568499771130226982:3549] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:33.666701Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:34.071306Z node 13 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1762178673955, txId: 281474976715673] shutting down >> KqpResultSetFormats::ArrowFormat_Returning-isOlap >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK >> ViewerTopicDataTests::TopicDataTest ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock Test command err: 2025-11-03T14:04:32.456742Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:107;event=initialize_shard;step=OnActivateExecutor; 2025-11-03T14:04:32.462709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];fline=columnshard.cpp:126;event=initialize_shard;step=initialize_tiring_finished; 2025-11-03T14:04:32.462809Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2025-11-03T14:04:32.463721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-11-03T14:04:32.463786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-11-03T14:04:32.463838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-11-03T14:04:32.463862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-11-03T14:04:32.463884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-11-03T14:04:32.463908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-11-03T14:04:32.463931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-11-03T14:04:32.463961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-11-03T14:04:32.463982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-11-03T14:04:32.464003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2025-11-03T14:04:32.464022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2025-11-03T14:04:32.464042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2025-11-03T14:04:32.464062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:129:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2025-11-03T14:04:32.471566Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2025-11-03T14:04:32.471628Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2025-11-03T14:04:32.471640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-11-03T14:04:32.471681Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:04:32.471736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-11-03T14:04:32.471752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-11-03T14:04:32.471758Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-11-03T14:04:32.471770Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2025-11-03T14:04:32.471779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-11-03T14:04:32.471787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-11-03T14:04:32.471793Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-11-03T14:04:32.471821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-11-03T14:04:32.471832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-11-03T14:04:32.471841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-11-03T14:04:32.471847Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-11-03T14:04:32.471860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-11-03T14:04:32.471869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-11-03T14:04:32.471878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-11-03T14:04:32.471884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-11-03T14:04:32.471894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-11-03T14:04:32.471904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-11-03T14:04:32.471910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-11-03T14:04:32.471918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-11-03T14:04:32.471929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-11-03T14:04:32.471935Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-11-03T14:04:32.471965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-11-03T14:04:32.471974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-11-03T14:04:32.471979Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-11-03T14:04:32.471996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-11-03T14:04:32.472005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2025-11-03T14:04:32.472011Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2025-11-03T14:04:32.472019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2025-11-03T14:04:32.472028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2025-11-03T14:04:32.472034Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2025-11-03T14:04:32.472042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2025-11-03T14:04:32.472052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2025-11-03T14:04:32.472058Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2025-11-03T14:04:32.472075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2025-11-03T14:04:32.472085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ne=columnshard__init.cpp:151;step=TTxUpdateSchema.Execute_Finish; CreateInitShard: { InitShard { OwnerPathId: 1 Tables { PathId: 1 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "key" TypeId: 4 } Columns { Id: 2 Name: "field" TypeId: 4608 } KeyColumnNames: "key" DefaultCompression { Codec: ColumnCodecPlain } Indexes { Id: 1000 Name: "MAX::INDEX::key" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 1 } } } } TtlSettings { Version: 1 } } OwnerPath: "/Root/olap" } } 2025-11-03T14:04:35.690853Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Complete;fline=columnshard__init.cpp:158;step=TTxUpdateSchema.Complete; 2025-11-03T14:04:35.690901Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxInit.Execute at tablet 9437184 2025-11-03T14:04:35.691275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=3; 2025-11-03T14:04:35.691329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:special_valuesLoadingTime=41; 2025-11-03T14:04:35.691359Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:special_valuesLoadingTime=23; 2025-11-03T14:04:35.691393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tables_managerLoadingTime=28; 2025-11-03T14:04:35.691418Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TablesLoadingTime=4; 2025-11-03T14:04:35.691426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetLoadingTime=3; 2025-11-03T14:04:35.691434Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;TableVersionssLoadingTime=4; 2025-11-03T14:04:35.691442Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=4; 2025-11-03T14:04:35.691528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tables_managerLoadingTime=129; 2025-11-03T14:04:35.691542Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=7; 2025-11-03T14:04:35.691555Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=7; 2025-11-03T14:04:35.691569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=8; 
2025-11-03T14:04:35.691578Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=4; 2025-11-03T14:04:35.691616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14; 2025-11-03T14:04:35.691664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=41; 2025-11-03T14:04:35.691672Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=2; 2025-11-03T14:04:35.691679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=1; 2025-11-03T14:04:35.691687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=2; 2025-11-03T14:04:35.691703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=7; 2025-11-03T14:04:35.691817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=1; 2025-11-03T14:04:35.691830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=7; 2025-11-03T14:04:35.691838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=1; 2025-11-03T14:04:35.691850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=6; 2025-11-03T14:04:35.691860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=3; 2025-11-03T14:04:35.691867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=2; 2025-11-03T14:04:35.691873Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=587; 2025-11-03T14:04:35.691909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2025-11-03T14:04:35.691919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2025-11-03T14:04:35.691931Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1531;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 
2025-11-03T14:04:35.692066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:04:35.692075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; 2025-11-03T14:04:35.692189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:265;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-11-03T14:04:35.692206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:256;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-11-03T14:04:35.692212Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:449;event=EnqueueBackgroundActivities;periodic=0; 2025-11-03T14:04:35.692218Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:455;problem=Background activities cannot be started: no index at tablet; VERIFY failed (2025-11-03T14:04:35.692237Z): ydb/core/tablet/tablet_pipe_client.cpp:45 TClient(): requirement tabletId != 0 failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+438 (0xF5BEFB6) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+263 (0xF5B5857) NKikimr::NTabletPipe::TClient::TClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+435 (0x1183EF03) NKikimr::NTabletPipe::CreateClient(NActors::TActorId const&, unsigned long, NKikimr::NTabletPipe::TClientConfig const&)+50 (0x1183ED22) NKikimr::NColumnShard::TColumnShard::Handle(TAutoPtr, TDelete>&)+284 (0x1AD9781C) NKikimr::NColumnShard::TColumnShard::StateWork(TAutoPtr&)+5290 (0x1AD8F3FA) NActors::IActor::Receive(TAutoPtr&)+216 (0xFD2C008) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+1389 (0x1D0AE7ED) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+4931 (0x1D0AB843) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+473 (0x1D0AF9D9) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+67 (0x1D0BF663) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+102 (0x1D0BF426) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+24 (0x1D0B2228) NActors::TTestActorRuntimeBase::SimulateSleep(TDuration)+419 (0x1D0B2133) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, TBasicString> const&, unsigned long)+216 (0x1EFDED98) NKikimr::NColumnShard::SetupSchema(NActors::TTestBasicRuntime&, NActors::TActorId&, unsigned long, NKikimr::NColumnShard::TestTableDescription const&, TBasicString>, unsigned long)+816 (0x1EFDF150) NKikimr::NColumnShard::PrepareTablet(NActors::TTestBasicRuntime&, unsigned long, std::__y1::vector> 
const&, unsigned int)+604 (0x1EFDFB2C) NKikimr::NTestSuiteEvWrite::TTestCaseWriteWithLock::Execute_(NUnitTest::TTestContext&)+800 (0xF3CD600) NKikimr::NTestSuiteEvWrite::TCurrentTest::Execute()::'lambda'()::operator()() const+71 (0xF4252D7) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+120 (0xF75EDF8) NKikimr::NTestSuiteEvWrite::TCurrentTest::Execute()+422 (0xF424BD6) NUnitTest::TTestFactory::Execute()+817 (0xF75F541) NUnitTest::RunMain(int, char**)+3229 (0xF76DCDD) ??+0 (0x7FB3D9429D90) __libc_start_main+128 (0x7FB3D9429E40) _start+41 (0xE551029) >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [GOOD] >> KqpQueryService::AlterTempTable >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] >> KqpService::ToDictCache-UseCache >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> KqpResultSetFormats::ArrowFormat_ColumnOrder >> DataShardReadIterator::ShouldReadRangeReverse >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [GOOD] >> KqpQueryService::ExecStats >> DataShardReadIterator::ShouldReadKeyCellVec >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace >> KqpQueryService::ShowCreateTable >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [GOOD] >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> KqpDocumentApi::RestrictWriteExplicitPrepare >> CommitOffset::DistributedTxCommit [GOOD] >> KqpQueryService::AlterTempTable [GOOD] >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> ViewerTopicDataTests::TopicDataTest [GOOD] >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] |56.9%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> CommitOffset::Commit_WithSession_ParentNotFinished_OtherSession_ParentCommittedToEnd >> KqpQueryService::ExecStats [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> KqpQueryService::AlterTable_SetNotNull_Invalid >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] >> Viewer::JsonStorageListingV2NodeIdFilter >> CommitOffset::DistributedTxCommit_ChildFirst |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpQueryService::ShowCreateTable [GOOD] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [GOOD] >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None >> DataShardReadIterator::ShouldReadRangeReverse [GOOD] >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD] >> TReplicationWithRebootsTests::Create [GOOD] |56.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut >> 
TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> KqpService::ToDictCache-UseCache [GOOD] >> TopicDescribe::BasicStartOffset [GOOD] >> KqpQueryService::ExecStatsPlan >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [GOOD] >> KqpQueryService::AlterTable_SetNotNull_Invalid [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [GOOD] >> KqpDocumentApi::Scripting >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] >> Viewer::JsonStorageListingV1NodeIdFilter >> TKesusTest::TestAcquireSemaphore >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> TopicDescribe::CompactionPreserveStartOffset >> KqpQueryService::ExecStatsPlan [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] >> Viewer::SimpleFeatureFlags >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> KqpQueryService::ShowCreateSysView >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> TKesusTest::TestAcquireSemaphore [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow >> KqpDocumentApi::Scripting [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD >> KqpQueryService::AlterTable_SetNotNull_Valid >> KqpService::TwoNodeOneShuttingDown >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> KqpQueryService::ExecStatsAst >> KqpQueryService::AlterTable_DropNotNull_Valid >> DataShardReadIterator::ShouldNotReadAfterCancel >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> KqpQueryService::ShowCreateSysView [GOOD] >> KqpQueryService::ExecStatsAst [GOOD] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [GOOD] >> KqpQueryService::DmlNoTx >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME >> KqpQueryService::ExecuteRetryQuery >> KqpQueryService::AlterTable_SetNotNull_Valid [GOOD] >> KqpQueryService::SeveralCTAS-UseSink >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid >> KqpQueryService::SeveralCTAS-UseSink [GOOD] >> KqpQueryService::ExecuteRetryQuery [GOOD] >> KqpQueryService::AlterCdcTopic >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] >> KqpQueryService::Explain >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] 
|56.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |56.9%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/mind/address_classification/ut/unittest ------- [TM] {default-linux-x86_64, pic, relwithdebinfo} ydb/library/yql/tests/sql/hybrid_file/part3/pytest >> test.py::test[window-yql-15636-default.txt-Results] [GOOD] Test command err: 127.0.0.1 - - [03/Nov/2025 14:02:56] "GET /mylib.sql HTTP/1.1" 200 - ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-11-03T14:04:02.018143Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.018168Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.021278Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.021331Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.052763Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.052901Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=6763635485815259307, session=0, seqNo=0) 2025-11-03T14:04:02.052929Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:02.063760Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=6763635485815259307, session=1) 2025-11-03T14:04:02.063858Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=4799217038399757587, session=0, seqNo=0) 2025-11-03T14:04:02.063895Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:02.074700Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=4799217038399757587, session=2) 2025-11-03T14:04:02.074939Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:02.074983Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:02.074999Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:02.075042Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=2, semaphore="Lock2" count=1) 2025-11-03T14:04:02.075054Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-11-03T14:04:02.075065Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-11-03T14:04:02.075083Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=333, session=1, semaphore="Lock2" count=1) 
2025-11-03T14:04:02.075093Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-11-03T14:04:02.085892Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:02.085920Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2025-11-03T14:04:02.085926Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=333) 2025-11-03T14:04:02.086047Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=16032197361533904182, name="Lock1") 2025-11-03T14:04:02.086068Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=16032197361533904182) 2025-11-03T14:04:02.086138Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:157:2179], cookie=10967884458116819420, name="Lock2") 2025-11-03T14:04:02.086146Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:157:2179], cookie=10967884458116819420) 2025-11-03T14:04:02.088739Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:02.088768Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:02.088812Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:02.088916Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:02.130916Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:02.130964Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:02.130974Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-11-03T14:04:02.130980Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-11-03T14:04:02.131091Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:196:2209], cookie=13838313580983707608, name="Lock1") 2025-11-03T14:04:02.131111Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:196:2209], cookie=13838313580983707608) 2025-11-03T14:04:02.131209Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:204:2216], cookie=14021280667813669796, name="Lock2") 2025-11-03T14:04:02.131217Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:204:2216], cookie=14021280667813669796) 2025-11-03T14:04:02.557750Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.568692Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.926576Z node 1 
:KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.937424Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.283306Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.294345Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.650685Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.661554Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.007171Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.018253Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.354599Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.365463Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.700018Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.711003Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.067382Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.078236Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.423667Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.434828Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.822936Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.835077Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.202273Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.213259Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.571301Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.591068Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.970701Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.987120Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.347473Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.362957Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.771675Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.783075Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.131343Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.142414Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.508557Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.519631Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.880453Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.891473Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.269015Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.279835Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.671909Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.682988Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.040323Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.051955Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.412737Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.424154Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.783182Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.802637Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.181365Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.192210Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.555001Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594 ... 
_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:39.626684Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.007126Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.019512Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.384319Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.403563Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.786692Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.803211Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.178345Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.190988Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.565544Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.576529Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.930796Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.941893Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.299879Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.312107Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.674977Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.687086Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.061389Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.074931Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.458241Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-11-03T14:04:43.458279Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:43.458290Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-11-03T14:04:43.458324Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-11-03T14:04:43.458334Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-11-03T14:04:43.458343Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-11-03T14:04:43.469486Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-11-03T14:04:43.469699Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:368:2348], cookie=17050274353010340654, name="Lock1") 
2025-11-03T14:04:43.469728Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:368:2348], cookie=17050274353010340654) 2025-11-03T14:04:43.469835Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:371:2351], cookie=4386404074998700424, name="Lock2") 2025-11-03T14:04:43.469846Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:371:2351], cookie=4386404074998700424) 2025-11-03T14:04:43.469910Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:374:2354], cookie=16170185321162662) 2025-11-03T14:04:43.469919Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:374:2354], cookie=16170185321162662) 2025-11-03T14:04:43.475778Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:43.475820Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:43.475887Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:43.476077Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:43.519434Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:43.519496Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-11-03T14:04:43.519508Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-11-03T14:04:43.519664Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:413:2384], cookie=1898042012742961338) 2025-11-03T14:04:43.519691Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:413:2384], cookie=1898042012742961338) 2025-11-03T14:04:43.519840Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:420:2390], cookie=11976583321472623107, name="Lock1") 2025-11-03T14:04:43.519856Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:420:2390], cookie=11976583321472623107) 2025-11-03T14:04:43.519973Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:423:2393], cookie=8425599212730629971, name="Lock2") 2025-11-03T14:04:43.519982Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:423:2393], cookie=8425599212730629971) 2025-11-03T14:04:43.647732Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:43.647758Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:43.650874Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:43.650901Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:43.672478Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] 
TTxInit::Complete 2025-11-03T14:04:43.672627Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2160], cookie=8333361560346860530, session=0, seqNo=0) 2025-11-03T14:04:43.672656Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:43.683843Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2160], cookie=8333361560346860530, session=1) 2025-11-03T14:04:43.683936Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2160], cookie=12740889877466851955, session=0, seqNo=0) 2025-11-03T14:04:43.683972Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:43.694734Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2160], cookie=12740889877466851955, session=2) 2025-11-03T14:04:43.694813Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=111, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:43.706961Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=111) 2025-11-03T14:04:43.707132Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:149:2171], cookie=17562726293266311735, name="Sem1", limit=1) 2025-11-03T14:04:43.707173Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:43.719801Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:149:2171], cookie=17562726293266311735) 2025-11-03T14:04:43.719913Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-11-03T14:04:43.734890Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=333) 2025-11-03T14:04:43.734996Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=222, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:43.735038Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:43.735080Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=333, session=2, semaphore="Sem1" count=1) 2025-11-03T14:04:43.747380Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=222) 2025-11-03T14:04:43.747416Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=333) 2025-11-03T14:04:43.747572Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=5645171492714161196, name="Sem1") 2025-11-03T14:04:43.747597Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=5645171492714161196) 2025-11-03T14:04:43.747696Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=4488473221775030117, name="Sem1") 2025-11-03T14:04:43.747704Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=4488473221775030117) 2025-11-03T14:04:43.747760Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:165:2187], cookie=14837556045816581949, name="Sem1", force=0) 2025-11-03T14:04:43.758917Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:165:2187], cookie=14837556045816581949) 2025-11-03T14:04:43.759103Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:170:2192], cookie=7860573580170818865, name="Sem1", force=1) 2025-11-03T14:04:43.759138Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-11-03T14:04:43.770093Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:170:2192], cookie=7860573580170818865) |56.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/tx/schemeshard/ut_replication_reboots/unittest >> TReplicationWithRebootsTests::Create [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:120:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:121:2058] recipient: [1:115:2146] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:136:2058] recipient: [1:114:2145] Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:141:2058] recipient: [1:115:2146] 2025-11-03T14:04:22.597568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7917: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-11-03T14:04:22.597595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7945: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:22.597602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7831: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-11-03T14:04:22.597609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7847: OperationsProcessing config: using default configuration 2025-11-03T14:04:22.597617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7853: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-11-03T14:04:22.597623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7853: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-11-03T14:04:22.597634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7977: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-11-03T14:04:22.597649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-11-03T14:04:22.597780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2025-11-03T14:04:22.597871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2025-11-03T14:04:22.620744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8080: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2025-11-03T14:04:22.620790Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:22.620910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8048: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:134:2158] sender: [1:180:2058] recipient: [1:15:2062] 2025-11-03T14:04:22.625963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2025-11-03T14:04:22.627274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2025-11-03T14:04:22.627331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-11-03T14:04:22.629462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2025-11-03T14:04:22.629522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2025-11-03T14:04:22.629643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1382: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:22.629736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-11-03T14:04:22.630387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:158: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:22.630440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2025-11-03T14:04:22.630811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-11-03T14:04:22.630827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-11-03T14:04:22.630856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2025-11-03T14:04:22.630865Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-11-03T14:04:22.630873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2025-11-03T14:04:22.630893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7091: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:219:2058] recipient: [1:217:2217] Leader for TabletID 72057594037968897 is [1:223:2221] sender: [1:224:2058] recipient: [1:217:2217] 2025-11-03T14:04:22.632453Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:244:2058] recipient: [1:15:2062] 2025-11-03T14:04:22.658448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-11-03T14:04:22.658547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:22.658621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:599: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-11-03T14:04:22.658716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5533: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2025-11-03T14:04:22.658775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-11-03T14:04:22.658793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:22.659700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:22.659764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-11-03T14:04:22.659828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:22.659841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-11-03T14:04:22.659848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-11-03T14:04:22.659857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 2 -> 3 2025-11-03T14:04:22.660302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:22.660315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-11-03T14:04:22.660321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2683: Change state for txid 1:0 3 -> 128 2025-11-03T14:04:22.660673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:22.660685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-11-03T14:04:22.660691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-11-03T14:04:22.660701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-11-03T14:04:22.661446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-11-03T14:04:22.661837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:663: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-11-03T14:04:22.661893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:139:2161] sender: [1:259:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-11-03T14:04:22.662146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-11-03T14:04:22.662173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 Tabl ... 
ation_side_effects.cpp:1009: Publication details: tx: 1002, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-11-03T14:04:43.087108Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [55:212:2212], Recipient [55:131:2155]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-11-03T14:04:43.087116Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-11-03T14:04:43.087130Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-11-03T14:04:43.087142Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-11-03T14:04:43.087148Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 1002 2025-11-03T14:04:43.087156Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-11-03T14:04:43.087161Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-11-03T14:04:43.087175Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-11-03T14:04:43.087336Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 274137603, Sender [55:212:2212], Recipient [55:131:2155]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Version: 2 } 2025-11-03T14:04:43.087346Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5314: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-11-03T14:04:43.087360Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6254: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-11-03T14:04:43.087372Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 1002 2025-11-03T14:04:43.087377Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1002 2025-11-03T14:04:43.087382Z node 55 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-11-03T14:04:43.087387Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:610: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 3] was 3 2025-11-03T14:04:43.087400Z node 55 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1002, subscribers: 0 2025-11-03T14:04:43.087405Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-11-03T14:04:43.087946Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:43.087981Z node 55 :TX_PROXY INFO: describe.cpp:354: Actor# [55:405:2369] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2025-11-03T14:04:43.088019Z node 55 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-11-03T14:04:43.088027Z node 55 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2025-11-03T14:04:43.088085Z node 55 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409546] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-11-03T14:04:43.088098Z node 55 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409546][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2025-11-03T14:04:43.088108Z node 55 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409546][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2025-11-03T14:04:43.090018Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-11-03T14:04:43.090029Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:43.091615Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-11-03T14:04:43.091626Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-11-03T14:04:43.091870Z node 55 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409546][TxDiscoveryTargetsResult] Complete TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-11-03T14:04:43.091933Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-11-03T14:04:43.091940Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-11-03T14:04:43.091999Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877761, Sender [55:422:2386], Recipient [55:131:2155]: NKikimr::TEvTabletPipe::TEvServerConnected 
2025-11-03T14:04:43.092006Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5327: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-11-03T14:04:43.092011Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6147: Pipe server connected, at tablet: 72057594046678944 2025-11-03T14:04:43.092037Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271124996, Sender [55:419:2383], Recipient [55:131:2155]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1002 2025-11-03T14:04:43.092042Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5243: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-11-03T14:04:43.092055Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-11-03T14:04:43.092075Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-11-03T14:04:43.092080Z node 55 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [55:420:2384] 2025-11-03T14:04:43.092103Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 269877764, Sender [55:422:2386], Recipient [55:131:2155]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:04:43.092108Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5328: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-11-03T14:04:43.092113Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6195: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1002 2025-11-03T14:04:43.092169Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5230: StateWork, received event# 271122945, Sender [55:423:2387], Recipient [55:131:2155]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-11-03T14:04:43.092179Z node 55 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5242: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-11-03T14:04:43.092191Z node 55 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-11-03T14:04:43.092241Z node 55 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 49us result status StatusSuccess 2025-11-03T14:04:43.092352Z node 55 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Replication" PathDescription { Self { Name: "Replication" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ReplicationDescription { Name: "Replication" Config { SrcConnectionParams { StaticCredentials { User: "user" } } Specific { Targets { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } ConsistencySettings { Row { } } } PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: Trying to start YDB, gRPC: 6083, MsgBus: 17179 2025-11-03T14:04:39.665501Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499798452802524:2205];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:39.665606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00173f/r3tmp/tmpsQvFvy/pdisk_1.dat 2025-11-03T14:04:39.709724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:39.721492Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6083, node 1 2025-11-03T14:04:39.749858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:39.749870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:39.749871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:39.749920Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:17179 2025-11-03T14:04:39.772987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:39.773014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:39.774864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected TClient is connected to server localhost:17179 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:39.821665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:39.995655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:40.148228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499802747770315:2319], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.148251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.148318Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499802747770326:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.148341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499802747770329:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.148363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.149228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:40.151188Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7568499802747770331:2324], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:40.224783Z node 1 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [1:7568499802747770382:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:40.278512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-11-03T14:04:40.284066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.320670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-11-03T14:04:40.341347Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-11-03T14:04:40.350457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.427286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-11-03T14:04:40.472749Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7568499802747770881:2390], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:40.473395Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=1&id=MzAyOTBjZTMtZWUyYzU3NTgtZTIzZmEwZi04Nzg3Nzc4Yg==, ActorId: [1:7568499802747770878:2388], ActorState: ExecuteState, TraceId: 01k950hhmm999ctfnc8ytwjecf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:04:40.475345Z node 1 :HIVE WARN: hive_impl.cpp:516: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-11-03T14:04:40.478303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-11-03T14:04:40.480396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715671, at schemeshard: 72057594046644480 2025-11-03T14:04:40.480748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-11-03T14:04:40.489123Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [1:7568499802747770934:2397], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:40.489931Z node 1 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=1&id=ODFhNmU2M2ItOWE3OGVhNDQtYjJmMjNmNGUtYmZlOGZhODc=, ActorId: [1:7568499802747770932:2396], ActorState: ExecuteState, TraceId: 01k950hhn30x674w4b9qtj70qg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 14239, MsgBus: 27672 2025-11-03T14:04:40.782261Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7568499804391865180:2079];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:40.782481Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:40.784337Z nod ... node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499810391285925:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:42.192967Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:42.193947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:42.196448Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7568499810391285928:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:42.274399Z node 3 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [3:7568499810391285979:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:42.283392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:42.340611Z node 3 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [3:7568499810391286138:2421] txid# 281474976715662, issues: { message: "CreateSetConstraintInitiate is not implemented. TablePath = \'/Root/test/alterNotNull\'" severity: 1 } 2025-11-03T14:04:42.340934Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [3:7568499810391286130:2343], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:30: Error: Execution of node: KiAlterTable!
: Error: CreateSetConstraintInitiate is not implemented. TablePath = '/Root/test/alterNotNull', code: 2029 2025-11-03T14:04:42.341023Z node 3 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=3&id=ZGRhYmNjNzYtNzc4ZTlhMTMtNmU4ZTJmZmQtYTExNzAwMzA=, ActorId: [3:7568499810391286128:2342], ActorState: ExecuteState, TraceId: 01k950hkez8m6xkw509sdp48zs, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: Trying to start YDB, gRPC: 10840, MsgBus: 29689 2025-11-03T14:04:42.696925Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:42.697080Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00173f/r3tmp/tmpXtED9v/pdisk_1.dat 2025-11-03T14:04:42.713533Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10840, node 4 2025-11-03T14:04:42.736758Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:42.736772Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:42.736774Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:42.736817Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:29689 2025-11-03T14:04:42.796387Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:42.796423Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:29689 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-11-03T14:04:42.801375Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:42.803005Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:42.805588Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:42.945122Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:43.172260Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499815264157429:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.172258Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499815264157448:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.172293Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.172361Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499815264157459:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.172385Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.173030Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:43.175332Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7568499815264157458:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-11-03T14:04:43.250801Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:7568499815264157511:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:43.317259Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:43.357121Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [4:7568499815264157661:2343], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-11-03T14:04:43.357197Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=4&id=OTU5ODdiODEtYzgwMmNhMjYtNTQ5ZmFhM2UtMjQ4MDNmZDI=, ActorId: [4:7568499815264157659:2342], ActorState: ExecuteState, TraceId: 01k950hmesejpeqme71qfnd06f, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-11-03T14:04:43.363539Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) 2025-11-03T14:04:43.375423Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:188) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: Build = 0.2037829508 Merge = 0.9290208371 Destroy = 0.04290871163 Data has built Merge = 0.07319410075 Data has merged 2025-11-03T14:04:20.325932Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:20.325997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:20.327994Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:20.328105Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:20.328142Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:20.371276Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:20.371309Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:20.371856Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1579:2421], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:20.372069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:20.372083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:20.372309Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1587:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:20.372503Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:20.372624Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1583:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:20.372640Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:20.372840Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:20.372918Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1571:2253], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:20.372943Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1575:2363], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:20.372963Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:20.373067Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:20.373093Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:20.373186Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:20.373194Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:20.519949Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:20.577567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.577609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.577748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.577763Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.577799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.577808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.577846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.577855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.577890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:20.577905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:20.634678Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:20.634829Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-11-03T14:04:20.634895Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-11-03T14:04:20.634962Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-11-03T14:04:20.635060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:20.635245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:20.635281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:20.635310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:20.635339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:20.710875Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-11-03T14:04:20.736515Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:416} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-11-03T14:04:20.869926Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:354} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 4369, node 1 TClient is connected to server localhost:22732 2025-11-03T14:04:21.001774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:21.001799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:21.001804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:21.001925Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration Request timer = 14.00858527 BASE_PERF = 1.444322037 2025-11-03T14:04:37.287713Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7568499789823065466:2075];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:37.289627Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:37.290347Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:37.309273Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:37.309301Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:37.310786Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:37.311045Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [6:7568499789823065429:2081] 1762178677287181 != 1762178677287184 2025-11-03T14:04:37.313816Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12192, node 6 2025-11-03T14:04:37.324065Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:37.324079Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:37.324083Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:37.324132Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:64973 WaitRootIsUp 'Root'... TClient::Ls request: Root 2025-11-03T14:04:37.341298Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 P ... session got write response: sequence_numbers: 8 offsets: 47 already_written: false write_statistics { } 2025-11-03T14:04:39.458933Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 8 2025-11-03T14:04:39.458988Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 9 offsets: 48 already_written: false write_statistics { } 2025-11-03T14:04:39.458991Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 9 2025-11-03T14:04:39.459008Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 10 offsets: 49 already_written: false write_statistics { } 2025-11-03T14:04:39.459014Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 10 2025-11-03T14:04:39.459035Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 11 offsets: 50 already_written: false write_statistics { } 2025-11-03T14:04:39.459038Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 11 2025-11-03T14:04:39.459495Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.459510Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 2 message(s) (7 left), first sequence number is 12 2025-11-03T14:04:39.463870Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.463889Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (6 left), first sequence number is 14 2025-11-03T14:04:39.465477Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.465493Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (5 left), first sequence number is 15 2025-11-03T14:04:39.467627Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 12 
sequence_numbers: 13 offsets: 51 offsets: 52 already_written: false already_written: false write_statistics { persist_duration_ms: 3 } 2025-11-03T14:04:39.467641Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 12 2025-11-03T14:04:39.467646Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 13 2025-11-03T14:04:39.471035Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.471054Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (4 left), first sequence number is 16 2025-11-03T14:04:39.471465Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.471480Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (3 left), first sequence number is 17 2025-11-03T14:04:39.477354Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.477373Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (2 left), first sequence number is 18 2025-11-03T14:04:39.477572Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.477584Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (1 left), first sequence number is 19 2025-11-03T14:04:39.478776Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 14 offsets: 53 already_written: false write_statistics { persist_duration_ms: 8 } 2025-11-03T14:04:39.478790Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 14 2025-11-03T14:04:39.478830Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 15 offsets: 54 already_written: false write_statistics { persist_duration_ms: 1 queued_in_partition_duration_ms: 8 } 2025-11-03T14:04:39.478834Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 15 2025-11-03T14:04:39.480506Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 16 offsets: 55 already_written: false write_statistics { } 2025-11-03T14:04:39.480516Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 16 2025-11-03T14:04:39.482772Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 17 offsets: 56 already_written: false write_statistics { persist_duration_ms: 1 } 2025-11-03T14:04:39.482783Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 17 2025-11-03T14:04:39.482819Z 
:DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 18 offsets: 57 already_written: false write_statistics { persist_duration_ms: 1 } 2025-11-03T14:04:39.482821Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 18 2025-11-03T14:04:39.482832Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 19 offsets: 58 already_written: false write_statistics { } 2025-11-03T14:04:39.482834Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 19 2025-11-03T14:04:39.483716Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: try to update token 2025-11-03T14:04:39.483730Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Send 1 message(s) (0 left), first sequence number is 20 2025-11-03T14:04:39.484611Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session got write response: sequence_numbers: 20 offsets: 59 already_written: false write_statistics { } 2025-11-03T14:04:39.484620Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: acknoledged message 20 2025-11-03T14:04:39.504242Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session will now close 2025-11-03T14:04:39.504271Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: aborting 2025-11-03T14:04:39.504481Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: gracefully shut down, all writes complete 2025-11-03T14:04:39.504508Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|13aabe68-525f0e9-f64186e5-7d9a6ae3_0] Write session: destroy 2025-11-03T14:04:39.739422Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-11-03T14:04:39.830820Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-11-03T14:04:39.831051Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-11-03T14:04:39.831061Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:31770 2025-11-03T14:04:39.832486Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-11-03T14:04:39.834097Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1762178679834 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-11-03T14:04:39.834131Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. 
Init response: session_id: "producer4|c4a27fbf-66a0031d-e803790-3426d176_0" topic: "topic1" 2025-11-03T14:04:39.834704Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write 1 messages with Id from 1 to 1 2025-11-03T14:04:39.834756Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session: close. Timeout = 18446744073709551 ms 2025-11-03T14:04:39.867496Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session: try to update token 2025-11-03T14:04:39.867525Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Send 1 message(s) (0 left), first sequence number is 1 2025-11-03T14:04:39.870811Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { } 2025-11-03T14:04:39.870830Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session: acknoledged message 1 2025-11-03T14:04:39.934842Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session will now close 2025-11-03T14:04:39.934869Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session: aborting 2025-11-03T14:04:39.935063Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session: gracefully shut down, all writes complete 2025-11-03T14:04:39.935095Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|c4a27fbf-66a0031d-e803790-3426d176_0] Write session: destroy Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic 2025-11-03T14:04:40.019356Z node 7 :PERSQUEUE ERROR: partition_read.cpp:820: [72075186224037889][Partition][0][StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 2025-11-03T14:04:40.019383Z node 7 :PERSQUEUE ERROR: pq_impl.cpp:202: Answer error topic: 'topic1' partition: 0 messageNo: 0 requestId: error: trying to read from future. 
ReadOffset 10000, 0 EndOffset 61 Got response:400: Bad offset ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64401, MsgBus: 20095 2025-11-03T14:04:39.770306Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499796718405716:2065];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:39.770325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001721/r3tmp/tmpnlyrfV/pdisk_1.dat 2025-11-03T14:04:39.838355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:39.838407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:39.839195Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:39.846566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:39.846770Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:39.854708Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499796718405683:2081] 1762178679769877 != 1762178679769880 TServer::EnableGrpc on GrpcPort 64401, node 1 2025-11-03T14:04:39.868614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:39.868627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:39.868629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:39.868680Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:20095 TClient is connected to server localhost:20095 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
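The producer3/producer4 traces above follow one fixed pattern: each message receives the next sequence number when it is sent ("Send 1 message(s) (N left), first sequence number is ..."), and the session marks it acknowledged once a write response echoes that sequence number together with the offset the partition assigned ("sequence_numbers: X offsets: Y"). The following is a minimal, self-contained sketch of that bookkeeping only; all names are illustrative and this is not YDB SDK or server code.

```cpp
// Illustrative model of a topic write session's in-flight bookkeeping,
// mirroring the log pattern above. Not YDB SDK code.
#include <cstdint>
#include <deque>
#include <iostream>
#include <string>
#include <utility>

struct InFlightMessage {
    uint64_t seqNo;
    std::string payload;
};

class WriteSessionModel {
public:
    // Assign the next sequence number and enqueue the message, mirroring
    // "Send 1 message(s) (N left), first sequence number is <seqNo>".
    uint64_t Send(std::string payload) {
        uint64_t seqNo = nextSeqNo_++;
        inFlight_.push_back({seqNo, std::move(payload)});
        return seqNo;
    }

    // Handle a write response ("sequence_numbers: X offsets: Y"): responses
    // arrive in send order, so the head of the in-flight queue must match.
    bool Acknowledge(uint64_t seqNo, uint64_t offset) {
        if (inFlight_.empty() || inFlight_.front().seqNo != seqNo) {
            return false;  // out-of-order or unknown acknowledgement
        }
        std::cout << "acknowledged message " << seqNo
                  << " at offset " << offset << "\n";
        inFlight_.pop_front();
        return true;
    }

    size_t InFlightCount() const { return inFlight_.size(); }

private:
    uint64_t nextSeqNo_ = 1;
    std::deque<InFlightMessage> inFlight_;
};

int main() {
    WriteSessionModel session;
    uint64_t s1 = session.Send("message-1");
    uint64_t s2 = session.Send("message-2");
    session.Acknowledge(s1, 53);   // e.g. "sequence_numbers: 14 offsets: 53"
    session.Acknowledge(s2, 54);
    return session.InFlightCount() == 0 ? 0 : 1;
}
```

The "reading from too big offset ... trying to read from future" error just before this point is the complementary read-side check: a read request whose offset is at or beyond the partition's EndOffset is rejected with a 400 "Bad offset" response instead of blocking.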
2025-11-03T14:04:39.944212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:39.946997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:39.957516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:39.976854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:40.001357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:40.017117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:40.077403Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:40.305324Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801013374627:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.305358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.305548Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801013374637:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.305555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.398703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.408905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.420223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.434332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.463402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.480517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.503558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.520745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.554347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499801013375508:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.554403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.554567Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801013375514:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.554585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.554596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801013375513:2479], DatabaseId: /Root, PoolId: default, ... is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:41.992569Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7568499807044929531:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:04:42.092569Z node 2 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [2:7568499811339896880:3551] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:42.351232Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:42.403152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:42.478974Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7568499811339897271:2536], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:42.481567Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=2&id=NDM3ZDMzZjAtZDBmZTdjYzMtOWZlNTRlMWQtNWZhYzM4NDM=, ActorId: [2:7568499811339897184:2522], ActorState: ExecuteState, TraceId: 01k950hkk5ebtbqtgtpfp5ga6m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-11-03T14:04:42.486963Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:570: Compilation failed, self: [2:7568499811339897284:2539], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-11-03T14:04:42.487382Z node 2 :KQP_SESSION WARN: kqp_session_actor.cpp:2506: SessionId: ydb://session/3?node_id=2&id=NDM3ZDMzZjAtZDBmZTdjYzMtOWZlNTRlMWQtNWZhYzM4NDM=, ActorId: [2:7568499811339897184:2522], ActorState: ExecuteState, TraceId: 01k950hkkj8vwwxdxsf02kja2g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 62205, MsgBus: 22425 2025-11-03T14:04:42.922775Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7568499812141477699:2079];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:42.922949Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001721/r3tmp/tmpg9fIUj/pdisk_1.dat 2025-11-03T14:04:42.925811Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:42.937242Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62205, node 3 2025-11-03T14:04:42.949992Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:42.950006Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:42.950007Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:42.950049Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:22425 TClient is connected to server localhost:22425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:43.006287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
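A second pattern that repeats throughout this log is the workload service bootstrapping its default resource pool: fetching the pool returns NOT_FOUND, a creator actor schedules a retry ("Transaction ... completed, doublechecking"), and when a concurrent creator wins the race the scheme operation reports "path exist, request accepts it" and everything proceeds. Below is an illustrative create-if-missing sketch of that idea with in-memory stand-ins; the status values and helper names are assumptions for the example, not YDB source.

```cpp
// Illustrative "create default resource pool if missing" pattern, tolerant of a
// concurrent creator. Stand-in storage and names only; not YDB code.
#include <iostream>
#include <set>
#include <string>

enum class EStatus { Success, NotFound, AlreadyExists };

static std::set<std::string> g_pools;  // in-memory stand-in for the scheme tree

EStatus FetchPool(const std::string& name) {
    return g_pools.count(name) ? EStatus::Success : EStatus::NotFound;
}

EStatus CreatePool(const std::string& name) {
    return g_pools.insert(name).second ? EStatus::Success : EStatus::AlreadyExists;
}

EStatus EnsureDefaultPool(const std::string& name = "default") {
    if (FetchPool(name) == EStatus::Success) {
        return EStatus::Success;            // pool already exists, nothing to do
    }
    EStatus created = CreatePool(name);
    if (created == EStatus::AlreadyExists) {
        // Another session created it first; the scheme shard answers
        // "path exist, request accepts it", which is treated as success.
        return EStatus::Success;
    }
    return created;
}

int main() {
    std::cout << (EnsureDefaultPool() == EStatus::Success ? "ok" : "failed") << "\n";
    std::cout << (EnsureDefaultPool() == EStatus::Success ? "ok" : "failed") << "\n";
}
```

The SCHEME_ERROR right above ("Cannot find table 'db.[/Root/.sys/show_create]'") is different in kind: it is a hard compile error for a missing system view, not a benign bootstrap race, so the session reports ReplyQueryCompileError instead of retrying.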
2025-11-03T14:04:43.024916Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:43.024953Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:43.026019Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:43.134437Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:43.380912Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499816436445592:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.380937Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.381049Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499816436445619:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.381057Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7568499816436445620:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.381076Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:43.381788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:43.383981Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7568499816436445623:2322], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-11-03T14:04:43.444293Z node 3 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [3:7568499816436445675:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:43.470916Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:43.530473Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:43.577448Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:43.651656Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2025-11-03T14:04:43.657922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [GOOD] >> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD] >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> Viewer::SimpleFeatureFlags [GOOD] >> KqpQueryService::AlterCdcTopic [GOOD] |56.9%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpQueryService::DmlNoTx [GOOD] >> 
KqpResultSetFormats::ArrowFormat_Multistatement >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> KqpQueryService::Explain [GOOD] ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2025-11-03T14:04:03.571239Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:03.571270Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:03.575932Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:03.576016Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:03.607675Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:03.607827Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=12350599513648087364, session=0, seqNo=0) 2025-11-03T14:04:03.607859Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:03.618534Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=12350599513648087364, session=1) 2025-11-03T14:04:03.618601Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=17963855585614820922, session=0, seqNo=0) 2025-11-03T14:04:03.618636Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:03.629319Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=17963855585614820922, session=2) 2025-11-03T14:04:03.629423Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:149:2171], cookie=8132464165139242415, name="Sem1", limit=1) 2025-11-03T14:04:03.629446Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:03.640034Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:149:2171], cookie=8132464165139242415) 2025-11-03T14:04:03.640099Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=111, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:03.640126Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:03.640152Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-11-03T14:04:03.650814Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:03.650831Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2025-11-03T14:04:03.650931Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute 
(sender=[1:157:2179], cookie=7505159282491515161, name="Sem1") 2025-11-03T14:04:03.650945Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:157:2179], cookie=7505159282491515161) 2025-11-03T14:04:03.650993Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:160:2182], cookie=2884200305973369190, name="Sem1") 2025-11-03T14:04:03.650998Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:160:2182], cookie=2884200305973369190) 2025-11-03T14:04:04.067052Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.077867Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.423741Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.434706Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.790373Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.801270Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.148124Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.159289Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.505434Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.516541Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.859920Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.871441Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.212624Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.227229Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.580746Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.593308Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.947210Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.958173Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.358759Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.369818Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.720705Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.731844Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.081339Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.095224Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
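The Kesus trace in this test shows the semaphore grant discipline: "Sem1" is created with a limit, each session's acquire carries a count, grants are handed out strictly in arrival order ("Processing semaphore ... queue: next order #N session M"), and an acquire that no longer fits waits until capacity is freed (the TestAcquireSemaphoreViaDecrease case frees capacity by re-acquiring with a smaller count; a session timeout has the same effect). The sketch below is a simplified model of just the FIFO grant/release behaviour; it is illustrative only and not the Kesus tablet implementation.

```cpp
// Simplified model of a counting semaphore with FIFO waiters, mirroring the
// "queue: next order #N session M" lines above. Illustrative only.
#include <cstdint>
#include <deque>
#include <iostream>
#include <vector>

struct Acquire {
    uint64_t session;
    uint64_t count;
};

class SemaphoreModel {
public:
    explicit SemaphoreModel(uint64_t limit) : limit_(limit) {}

    void Enqueue(uint64_t session, uint64_t count) {
        queue_.push_back({session, count});
        Process();
    }

    // Releasing a session's grant (e.g. on session timeout) frees capacity and
    // lets queued acquires proceed.
    void Release(uint64_t session) {
        for (auto it = owners_.begin(); it != owners_.end(); ++it) {
            if (it->session == session) {
                used_ -= it->count;
                owners_.erase(it);
                break;
            }
        }
        Process();
    }

private:
    void Process() {
        // Grant strictly in FIFO order; stop at the first acquire that does not fit.
        while (!queue_.empty() && used_ + queue_.front().count <= limit_) {
            Acquire a = queue_.front();
            queue_.pop_front();
            used_ += a.count;
            owners_.push_back(a);
            std::cout << "next order #" << ++order_ << " session " << a.session << "\n";
        }
    }

    uint64_t limit_;
    uint64_t used_ = 0;
    uint64_t order_ = 0;
    std::deque<Acquire> queue_;
    std::vector<Acquire> owners_;
};

int main() {
    SemaphoreModel sem(3);   // "Sem1" limit=3 as in the test above
    sem.Enqueue(1, 2);       // order #1
    sem.Enqueue(2, 1);       // order #2
    sem.Enqueue(3, 1);       // does not fit yet, waits in the queue
    sem.Release(1);          // freeing session 1's grant admits session 3 (order #3)
}
```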
2025-11-03T14:04:08.458212Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.469150Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.836480Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.847587Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.246890Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.257785Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.616175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.627213Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.992152Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.005612Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.354330Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.365367Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.714790Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.731127Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.142887Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.154283Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.516362Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.527268Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.905937Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.916951Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.278926Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.289958Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.640723Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.655001Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:13.036532Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.047516Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:13.418359Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.432091Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:13.802748Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.817081Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.242820Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.255069Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.650846Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.667327Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:15.129077Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:15.143527Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:15.566823Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:15.579015Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:15.999575Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.019097Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.414798Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.434957Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.795367Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.807025Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::C ... G: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:39.569347Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:39.962789Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:39.974994Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.353413Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.373217Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.808975Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.831122Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.210762Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.230194Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.619079Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.630194Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.011334Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.022543Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.384757Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.396070Z node 4 :KESUS_TABLET 
DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.772345Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.787411Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.158807Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.170001Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.521111Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.535234Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.903386Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.919180Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:44.278297Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:44.291081Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:44.683134Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-11-03T14:04:44.683173Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:44.683185Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-11-03T14:04:44.694664Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-11-03T14:04:44.705070Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:456:2415], cookie=10233066161091588353, name="Sem1") 2025-11-03T14:04:44.705116Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:456:2415], cookie=10233066161091588353) 2025-11-03T14:04:44.885081Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:44.885125Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:44.888943Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:44.889001Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:44.923462Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:44.923638Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=9757138635549547106, session=0, seqNo=0) 2025-11-03T14:04:44.923682Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:44.934532Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=9757138635549547106, session=1) 2025-11-03T14:04:44.934664Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=17799810832798864526, session=0, seqNo=0) 2025-11-03T14:04:44.934707Z node 5 :KESUS_TABLET 
DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:44.945705Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=17799810832798864526, session=2) 2025-11-03T14:04:44.945814Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:137:2162], cookie=18430350259714025507, session=0, seqNo=0) 2025-11-03T14:04:44.945863Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-11-03T14:04:44.958899Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:137:2162], cookie=18430350259714025507, session=3) 2025-11-03T14:04:44.959105Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2173], cookie=11975225785034210785, name="Sem1", limit=3) 2025-11-03T14:04:44.959159Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:44.977168Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2173], cookie=11975225785034210785) 2025-11-03T14:04:44.977278Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=111, session=1, semaphore="Sem1" count=2) 2025-11-03T14:04:44.977319Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:44.977358Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=222, session=2, semaphore="Sem1" count=1) 2025-11-03T14:04:44.977366Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-11-03T14:04:44.977379Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=333, session=3, semaphore="Sem1" count=1) 2025-11-03T14:04:44.991230Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=111) 2025-11-03T14:04:44.991272Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=222) 2025-11-03T14:04:44.991280Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=333) 2025-11-03T14:04:44.991432Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=1225825520170730513, name="Sem1") 2025-11-03T14:04:44.991458Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=1225825520170730513) 2025-11-03T14:04:44.991528Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2184], cookie=1587993613437398456, name="Sem1") 2025-11-03T14:04:44.991538Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2184], cookie=1587993613437398456) 2025-11-03T14:04:44.991576Z node 
5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:137:2162], cookie=444, session=1, semaphore="Sem1" count=1) 2025-11-03T14:04:44.991612Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-11-03T14:04:45.003629Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:137:2162], cookie=444) 2025-11-03T14:04:45.003821Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=5267732640612209264, name="Sem1") 2025-11-03T14:04:45.003851Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=5267732640612209264) 2025-11-03T14:04:45.003919Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:170:2192], cookie=12564858501449635312, name="Sem1") 2025-11-03T14:04:45.003930Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:170:2192], cookie=12564858501449635312) 2025-11-03T14:04:45.008289Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:45.008338Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:45.008400Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:45.008538Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:45.053991Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:45.054071Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:45.054081Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-11-03T14:04:45.054087Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-11-03T14:04:45.054213Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:209:2222], cookie=8337086488835506902, name="Sem1") 2025-11-03T14:04:45.054236Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:209:2222], cookie=8337086488835506902) 2025-11-03T14:04:45.054359Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:218:2230], cookie=3840949494271963445, name="Sem1") 2025-11-03T14:04:45.054368Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:218:2230], cookie=3840949494271963445) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: Trying to start YDB, gRPC: 31501, MsgBus: 7374 2025-11-03T14:04:39.851852Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499797062361187:2065];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:39.851869Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001769/r3tmp/tmpUXBWb8/pdisk_1.dat 2025-11-03T14:04:39.937241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions TServer::EnableGrpc on GrpcPort 31501, node 1 2025-11-03T14:04:39.942675Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:39.951074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499797062361162:2081] 1762178679851381 != 1762178679851384 2025-11-03T14:04:39.963411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:39.963434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:39.966604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:39.967515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:39.967524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:39.967526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:39.967562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:7374 TClient is connected to server localhost:7374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:40.055943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 
2025-11-03T14:04:40.059028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:40.092790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:40.133939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... 2025-11-03T14:04:40.189648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.199828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:40.215765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:40.337423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801357330109:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.337446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.337632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801357330119:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.337638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.415224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.425698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.440426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.461013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.475200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.489657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.503290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.516686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.536474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499801357330981:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.536497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.536628Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801357330986:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.536632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499801357330987:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.536634Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... de(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:43.886975Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:43.887888Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:43.927972Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:43.939492Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:43.966406Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:43.980732Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:44.053516Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:44.270784Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499820460045774:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.270918Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.274653Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499820460045857:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.274684Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.275283Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.286106Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.296247Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.311404Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.327642Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.342162Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.353701Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.371789Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.402702Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7568499820460046646:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.402756Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.408587Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499820460046651:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.408625Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499820460046652:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:44.408661Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.409753Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:44.414703Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2025-11-03T14:04:44.414816Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7568499820460046655:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:04:44.474202Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:7568499820460046707:3551] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:44.793958Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:44.817235Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.851025Z node 4 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037927:1][4:7568499820460047197:2544] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:20:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-11-03T14:04:44.870440Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:313) 2025-11-03T14:04:44.892957Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:7568499820460047330:3927] txid# 281474976710676, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2025-11-03T14:04:44.893041Z node 4 :KQP_SESSION WARN: kqp_session_actor.cpp:2987: SessionId: ydb://session/3?node_id=4&id=MTE3MGZiYzYtNjMyNDNjMzQtODdlZTZhNzMtM2I5ODg3MDk=, ActorId: [4:7568499820460047262:2554], ActorState: ExecuteState, TraceId: 01k950hnyq3qncs5y6rx9m0hvh, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
: Error: Executing ESchemeOpAlterPersQueueGroup, code: 2017
: Error: Cannot change partition count. Use split/merge instead, code: 2017 ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: BASE_PERF = 1.933909475 Build = 6.41884534 Merge = 8.463090027 Destroy = 3.95919987 2025-11-03T14:04:35.547841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:35.587054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:35.588395Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:35.588439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:35.588478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:35.687595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:35.687638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:35.697732Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:35.698437Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:34:2081] 1762178674777845 != 1762178674777849 2025-11-03T14:04:35.735172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16161, node 1 TClient is connected to server localhost:10346 2025-11-03T14:04:35.829170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:35.829192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:35.829197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:35.829313Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":1},"NodeId":1,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"10","AvailableSize":"90","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[1],"Overall":"Red"}]} 2025-11-03T14:04:37.215218Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:37.217319Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:37.217855Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:327:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:37.217934Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:37.217945Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:37.283621Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:37.283667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:37.287977Z node 2 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:37.288384Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [2:34:2081] 1762178676204986 != 1762178676204990 2025-11-03T14:04:37.319714Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9605, node 2 TClient is connected to server localhost:3158 2025-11-03T14:04:37.384233Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:37.384249Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:37.384253Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:37.384313Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":2},"NodeId":2,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"90","AvailableSize":"10","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[2],"Overall":"Red"}]} 2025-11-03T14:04:38.330019Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:38.331205Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:38.332114Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:328:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:38.332184Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:38.332203Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:38.396291Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:38.396336Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:38.400536Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:38.401022Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [3:34:2081] 1762178677657485 != 1762178677657489 2025-11-03T14:04:38.435159Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24218, node 3 TClient is connected to server localhost:12424 2025-11-03T14:04:38.515386Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:38.515411Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:38.515417Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:38.515555Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"0"} 2025-11-03T14:04:39.807840Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:39.811354Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:304:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:39.811468Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:39.811489Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:39.892100Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:39.892296Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:39.892315Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:39.892760Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [4:34:2081] 1762178679060400 != 1762178679060404 2025-11-03T14:04:39.925556Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6717, node 4 TClient is connected to server localhost:20145 2025-11-03T14:04:39.989661Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:39.989682Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:39.989687Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:39.989839Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.1","Used":"10","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":4},"NodeId":4,"VDiskState":"OK","DiskSpace":"Red","AllocatedSize":"10","AvailableSize":"90","Overall":"Red"}],"DiskSpace":"Red","GroupGeneration":1,"VDiskNodeIds":[4],"Overall":"Red"}]} 2025-11-03T14:04:41.012210Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:41.014471Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:41.015117Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:327:2371], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:41.015176Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:41.015198Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:41.082831Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:41.082876Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:41.088943Z node 5 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:41.089434Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [5:34:2081] 1762178680349632 != 1762178680349636 2025-11-03T14:04:41.123085Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16603, node 5 TClient is connected to server localhost:15343 2025-11-03T14:04:41.203261Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:41.203281Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:41.203286Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:41.203372Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"0"} 2025-11-03T14:04:42.400530Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:42.402347Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:42.403800Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:326:2369], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:42.403874Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:42.403914Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:42.470693Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:42.470735Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:42.475428Z node 6 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:42.475930Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [6:34:2081] 1762178681731992 != 1762178681731996 2025-11-03T14:04:42.507296Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17890, node 6 TClient is connected to server localhost:6539 2025-11-03T14:04:42.575354Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:42.575374Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:42.575379Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:42.575461Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.8","Used":"80","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":6},"NodeId":6,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"80","AvailableSize":"20","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[6],"Overall":"Red"}]} 2025-11-03T14:04:43.680634Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:43.682374Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:43.683572Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:299:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:43.683605Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-11-03T14:04:43.683737Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:43.755446Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:43.755490Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:43.763414Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:43.763949Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:34:2081] 1762178683005101 != 1762178683005105 2025-11-03T14:04:43.795299Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30580, node 7 TClient is connected to server localhost:24231 2025-11-03T14:04:43.861772Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:43.861792Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:43.861797Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:43.861891Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration json result: {"TotalGroups":"1","FoundGroups":"1","StorageGroups":[{"PoolName":"static","Kind":"","MediaType":"","Erasure":"none","Degraded":"1","Usage":"0.9","Used":"90","Limit":"100","Read":"0","Write":"0","GroupID":0,"ErasureSpecies":"none","VDisks":[{"VDiskId":{"GroupID":0,"GroupGeneration":1,"VDisk":0},"PDisk":{"PDiskId":0,"NodeId":7},"NodeId":7,"VDiskState":"OK","DiskSpace":"Green","AllocatedSize":"90","AvailableSize":"10","Overall":"Green"}],"DiskSpace":"Green","GroupGeneration":1,"VDiskNodeIds":[7],"Overall":"Red"}]} 2025-11-03T14:04:44.393300Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:44.393343Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-11-03T14:04:44.406978Z node 8 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:44.407298Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [8:7568499821211386667:2081] 1762178684387070 != 1762178684387073 2025-11-03T14:04:44.409951Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-11-03T14:04:44.409983Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:44.411924Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10640, node 8 2025-11-03T14:04:44.424651Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:44.424663Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:44.424665Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:44.424717Z node 8 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:31976 2025-11-03T14:04:44.555200Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions >> DataShardReadIteratorSysTables::ShouldRead >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> DataShardReadIterator::ShouldReadRangeCellVec >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DmlNoTx [GOOD] Test command err: Trying to start YDB, gRPC: 13615, MsgBus: 27217 2025-11-03T14:04:39.663490Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499799684835524:2150];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:39.664939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/001737/r3tmp/tmpIEr0xM/pdisk_1.dat 2025-11-03T14:04:39.726672Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:39.733835Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13615, node 1 2025-11-03T14:04:39.757051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:39.757068Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:39.757070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:39.757131Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:39.765212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:39.765250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:39.766647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected TClient is connected to server localhost:27217 TClient is connected to server localhost:27217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-11-03T14:04:39.831951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:39.851391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:39.895973Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions waiting... 2025-11-03T14:04:39.907274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:39.935588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:39.948280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:40.164217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803979804337:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.164250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.164362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803979804347:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.164375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.239441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.260694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.277124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.304158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.323356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.337856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.353791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.366244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.385420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499803979805210:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.385450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.385867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803979805215:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.385892Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803979805216:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-11-03T14:04:40.385924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.387080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046 ... m file: (empty maybe) 2025-11-03T14:04:44.035889Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:44.036121Z node 4 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TClient is connected to server localhost:29612 TClient is connected to server localhost:29612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:44.099053Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:44.100451Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-11-03T14:04:44.104253Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:44.113998Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:44.114038Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:44.115144Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:44.181200Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:44.209784Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:44.224554Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:44.283375Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:44.485730Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499819366590906:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.485750Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.485904Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499819366590915:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.485911Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.500224Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.511638Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.527431Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.536682Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.548019Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.562613Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.578136Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.590619Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.609195Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7568499819366591778:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.609218Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.609279Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499819366591783:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.609287Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499819366591784:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.609307Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.610296Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:44.617032Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7568499819366591787:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2025-11-03T14:04:44.698117Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:7568499819366591839:3549] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:45.012652Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, gRPC: 31502, MsgBus: 4046 2025-11-03T14:04:39.726757Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499799504617845:2154];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:39.726771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00172f/r3tmp/tmpNHG0Cx/pdisk_1.dat 2025-11-03T14:04:39.785885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:39.795884Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31502, node 1 2025-11-03T14:04:39.826441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:39.826453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:39.826454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:39.826496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:39.842406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:39.842452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TClient is connected to server localhost:4046 2025-11-03T14:04:39.843726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4046 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:39.887826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:39.890780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:39.900962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... 2025-11-03T14:04:39.922251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:39.947343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... 
2025-11-03T14:04:39.967286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.004045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:40.253029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803799586657:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.253068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.253228Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803799586667:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.253247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.320078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.337035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.350460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.363963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.378122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.391352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.406133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.430960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:40.463973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499803799587529:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.464002Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.464109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803799587534:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.464117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499803799587535:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.464137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:40.465111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshar ... HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29179, node 4 2025-11-03T14:04:44.347148Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:44.347161Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:44.347163Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:44.347206Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:5839 TClient is connected to server localhost:5839 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-11-03T14:04:44.416963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:44.420663Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-11-03T14:04:44.422894Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.444209Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 
2025-11-03T14:04:44.471915Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... waiting... 2025-11-03T14:04:44.486167Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.587336Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:44.747157Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499818733140220:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.747186Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.747509Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499818733140230:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.747522Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.768597Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.782717Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.798372Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.807245Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.821275Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.836121Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.855150Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.870892Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:44.887654Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[4:7568499818733141093:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.887690Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.887722Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499818733141098:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.887728Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7568499818733141099:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.887735Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:44.888515Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:44.896747Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7568499818733141102:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:04:44.955446Z node 4 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [4:7568499818733141154:3555] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:45.290988Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> DataShardReadIterator::ShouldHandleReadAck >> DataShardReadIterator::ShouldReverseReadMultipleKeys >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged >> KqpService::SwitchCache+UseCache >> KqpQueryServiceScripts::ForgetScriptExecution >> KqpRm::DisonnectNodes >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [GOOD] >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always >> KqpRm::NodesMembershipByExchanger >> KqpRm::NotEnoughMemory >> KqpRm::SingleTask |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [GOOD] >> KqpRm::NotEnoughExecutionUnits >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRetryAndRestart >> KqpRm::SnapshotSharingByExchanger >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> KqpRm::NotEnoughMemory [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] >> KqpRm::SingleTask [GOOD] >> KqpRm::NotEnoughExecutionUnits [GOOD] >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow >> DataShardReadIteratorSysTables::ShouldRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIteratorVectorTopK::BadRequest >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck >> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne >> TopicTimestamp::TimestampRead_6MB_Topic_exact [GOOD] >> TopicTimestamp::TimestampRead_6MB_Topic_offset+middle ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2025-11-03T14:04:47.344526Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:47.344768Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:47.344868Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] 
Bootstrap 2025-11-03T14:04:47.345044Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:47.359902Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:47.361254Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:47.361290Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:47.361668Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:47.362028Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:47.362168Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.362176Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.362214Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:47.365200Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:47.365286Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:47.365302Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:47.365322Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.365338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.365383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.387889Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.388978Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:47.389087Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.389668Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:47.428768Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.428822Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.439766Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.439816Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.439835Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.439849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.439874Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.439883Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.439893Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.439902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.451233Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.451288Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.462237Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.462294Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:47.462541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:47.462551Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:47.464935Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:47.464964Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:47.465535Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:47.465562Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:47.466027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001f00/r3tmp/tmpir7rUS/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:47.466120Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# 
/home/runner/.ya/build/build_root/ud3d/001f00/r3tmp/tmpir7rUS/pdisk_1.dat 2025-11-03T14:04:47.466131Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001f00/r3tmp/tmpir7rUS/pdisk_1.dat 2025-11-03T14:04:47.466425Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:47.466483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:47.466495Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:47.466534Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.466550Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.466611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.466673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.467206Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:47.467286Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.479448Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:47.479493Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:47.481963Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:47.482161Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001f00/r3tmp/tmpir7rUS/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:47.482330Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001f00/r3tmp/tmpir7rUS/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001f00/r3tmp/tmpir7rUS/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17375218824946252196 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:47.482553Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.482886Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.482909Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.482967Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.482983Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.484861Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.484917Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.484929Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.484944Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.484958Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.485066Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.485079Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.485084Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.485106Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:428:2315] 2025-11-03T14:04:47.485165Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.485173Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-11-03T14:04:47.485177Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.485192Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:47.485890Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.485904Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:417:2311] 2025-11-03T14:04:47.485921Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.485926Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:418:2115] 2025-11-03T14:04:47.486178Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:428:2315]} 2025-11-03T14:04:47.486199Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.486209Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.486212Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.486367Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:47.486416Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.486424Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.486427Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.511515Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:47.525997Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.526015Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:47.526025Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.526031Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-11-03T14:04:47.353804Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:47.354043Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:47.354131Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:47.354296Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:47.368938Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:47.370296Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:47.370333Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:47.370711Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:47.370911Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:47.370987Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.370994Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.371037Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:47.374712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:47.374789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:47.374804Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:47.374852Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.374868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.374882Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.396616Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.397465Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:47.397524Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.399366Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:47.433117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.433177Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.446783Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.446837Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.446855Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.446869Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.446909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.446919Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.446929Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.446947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.457879Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.457930Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.471004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.471055Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:47.471284Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:47.471294Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:47.473563Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:47.473587Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:47.474057Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:47.474085Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:47.474590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ef8/r3tmp/tmpChY7Zx/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:47.474776Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ef8/r3tmp/tmpChY7Zx/pdisk_1.dat 2025-11-03T14:04:47.474791Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ef8/r3tmp/tmpChY7Zx/pdisk_1.dat 2025-11-03T14:04:47.475015Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:47.475029Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:47.475117Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:47.475143Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.475160Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.475205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.475235Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.475802Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:47.475870Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.488814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:47.488871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:47.491179Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:47.491390Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ef8/r3tmp/tmpChY7Zx/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-11-03T14:04:47.491566Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ef8/r3tmp/tmpChY7Zx/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ef8/r3tmp/tmpChY7Zx/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15788232885666923316 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:47.491749Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.491818Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.491826Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.491859Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.491872Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.493613Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.493663Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.493679Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.493693Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.493716Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.493801Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.493813Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.493818Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.493840Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 
2025-11-03T14:04:47.494310Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.494321Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.494326Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.494337Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:47.495354Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.495368Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:47.495393Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.495399Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:47.495622Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:47.495671Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.495681Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.495685Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.495827Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:47.495870Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.495876Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.495880Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.515876Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:47.531067Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.531087Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:47.531097Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.531104Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2025-11-03T14:04:47.395546Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:47.395784Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:47.395884Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:47.396062Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:47.413704Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:47.415426Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:47.415485Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:47.415923Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:47.416639Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:47.416777Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.416785Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.416826Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:47.421592Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:47.421719Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:47.421736Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:47.421823Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.421842Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.421879Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.444453Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.447124Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:47.447248Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.452112Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:47.487220Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.487263Z node 
1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.502302Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.502351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.502365Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.502380Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.502409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.502421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.502428Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.502438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.513316Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.513356Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.524120Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.524170Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:47.524485Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:47.524500Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:47.527193Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:47.527215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:47.527767Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:47.527790Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:47.528197Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { 
DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ed2/r3tmp/tmpKLi80Y/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:47.528275Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ed2/r3tmp/tmpKLi80Y/pdisk_1.dat 2025-11-03T14:04:47.528283Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ed2/r3tmp/tmpKLi80Y/pdisk_1.dat 2025-11-03T14:04:47.528531Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:47.528562Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:47.528572Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:47.528601Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.528618Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.528668Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.528704Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.529106Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:47.529148Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.540078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:47.540128Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:47.542460Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:47.542664Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ed2/r3tmp/tmpKLi80Y/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-11-03T14:04:47.542836Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ed2/r3tmp/tmpKLi80Y/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ed2/r3tmp/tmpKLi80Y/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3667152492368042494 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:47.543028Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.543091Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.543100Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.543138Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.543150Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.544639Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.544685Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.544693Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.544703Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.544715Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.544818Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.544830Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.544835Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.544852Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:428:2315] 
2025-11-03T14:04:47.544893Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.544898Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.544900Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.544911Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:47.545445Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.545450Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:417:2311] 2025-11-03T14:04:47.545460Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.545463Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:418:2115] 2025-11-03T14:04:47.545639Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:428:2315]} 2025-11-03T14:04:47.545651Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.545660Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.545664Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.545827Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:47.545865Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.545870Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.545872Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.568096Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:47.584054Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.584079Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:47.584091Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.584098Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] Test command err: Trying to start YDB, gRPC: 10451, MsgBus: 3551 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003041/r3tmp/tmpDhipNb/pdisk_1.dat 2025-11-03T14:04:12.509328Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7568499682145321647:2225];send_to=[0:7307199536658146131:7762515]; 2025-11-03T14:04:12.509345Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-11-03T14:04:12.555902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:12.555929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:12.556930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:12.563853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:12.564003Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:12.564891Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [1:7568499682145321459:2081] 1762178652503747 != 1762178652503750 TServer::EnableGrpc on GrpcPort 10451, node 1 2025-11-03T14:04:12.576901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:12.576915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:12.576917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:12.576961Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:3551 TClient is connected to server localhost:3551 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-11-03T14:04:12.643520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:12.646213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-11-03T14:04:12.656852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.678663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.699230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.717981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:12.818096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:12.988351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499682145323111:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.988380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.988457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499682145323121:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:12.988469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.053205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.060494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.067914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.082548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.096742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.111871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.125066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.139886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:13.166495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7568499686440291279:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.166542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.166801Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499686440291284:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.166811Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7568499686440291285:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:13.166834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, Dat ... /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:46.251339Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:46.251390Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7568499829702402265:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:46.251397Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7568499829702402266:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:46.251419Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:46.252349Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:46.255738Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7568499829702402269:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2025-11-03T14:04:46.346218Z node 6 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [6:7568499829702402321:3561] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 17], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } 2025-11-03T14:04:46.640834Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; Trying to start YDB, gRPC: 27804, MsgBus: 23339 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/003041/r3tmp/tmpuxuyuY/pdisk_1.dat 2025-11-03T14:04:47.046842Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2025-11-03T14:04:47.046931Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:47.065282Z node 7 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:47.068845Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1229: Notification cookie mismatch for subscription [7:7568499833648528914:2081] 1762178687043224 != 1762178687043227 TServer::EnableGrpc on GrpcPort 27804, node 7 2025-11-03T14:04:47.078293Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:47.078319Z 
node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:47.078321Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:47.078371Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration TClient is connected to server localhost:23339 TClient is connected to server localhost:23339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-11-03T14:04:47.146712Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:47.146742Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:47.147075Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) waiting... 2025-11-03T14:04:47.147653Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:47.221842Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) waiting... 2025-11-03T14:04:47.278333Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2025-11-03T14:04:47.470909Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7568499833648530022:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:47.470954Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:47.471135Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7568499833648530048:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:47.471147Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7568499833648530049:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:47.471166Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-11-03T14:04:47.472145Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2025-11-03T14:04:47.475262Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:122: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-11-03T14:04:47.475363Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7568499833648530052:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-11-03T14:04:47.547869Z node 7 :TX_PROXY ERROR: schemereq.cpp:593: Actor# [7:7568499833648530103:2645] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges), source_location: ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:92" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2128;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3920;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=560;columns=4; >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow >> KqpRm::Reduce >> KqpRm::DisonnectNodes [GOOD] |57.0%| [LD] {default-linux-x86_64, relwithdebinfo} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |57.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |57.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRetryAndRestart [GOOD] >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRestartWithStateMigrationRetryAndRestartWithoutStateMigration >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-11-03T14:04:47.001208Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:47.001452Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:47.001555Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:47.001724Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:47.032531Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:47.033906Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:47.033946Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:47.034381Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:47.035577Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:47.035762Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.035771Z node 1 
:BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.035821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:47.038868Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:47.038950Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:47.038964Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:47.038982Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.038996Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.039034Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.066959Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.070980Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:47.071087Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.071599Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:47.110912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.110967Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.123375Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.123428Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.123446Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.123460Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.123489Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.123498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.123505Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 
2025-11-03T14:04:47.123513Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.135409Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.135462Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.146354Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.146408Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:47.146688Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:47.146696Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:47.148874Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:47.148891Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:47.149412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:47.149432Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:47.149814Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001f14/r3tmp/tmpOW4cR8/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:47.149896Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001f14/r3tmp/tmpOW4cR8/pdisk_1.dat 2025-11-03T14:04:47.149902Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001f14/r3tmp/tmpOW4cR8/pdisk_1.dat 2025-11-03T14:04:47.150151Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:47.150177Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:47.150185Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:47.150213Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.150228Z node 1 
:BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.150270Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.150307Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.150929Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:47.150988Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.163079Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:47.163141Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:47.165748Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:47.165916Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001f14/r3tmp/tmpOW4cR8/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:47.166071Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001f14/r3tmp/tmpOW4cR8/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001f14/r3tmp/tmpOW4cR8/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5679929559494863565 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:47.166259Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.166323Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.166332Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.166368Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.166381Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.168252Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.168298Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.168307Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.168319Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.168331Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.168429Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.168439Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.168444Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.168465Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:428:2315] 2025-11-03T14:04:47.168515Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.168521Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-11-03T14:04:47.168525Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.168536Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:47.169313Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.169350Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:417:2311] 2025-11-03T14:04:47.169365Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.169369Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:418:2115] 2025-11-03T14:04:47.169598Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:428:2315]} 2025-11-03T14:04:47.169608Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.169617Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.169621Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.169765Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:47.169809Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.169814Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.169818Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.185585Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:47.210524Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.210555Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:47.210568Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.210576Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-11-03T14:04:47.290316Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-11-03T14:04:47.332112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-11-03T14:04:47.373320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-11-03T14:04:47.663962Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.664927Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:47.665085Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:48.204302Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2025-11-03T14:04:48.204440Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2025-11-03T14:04:48.204492Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2025-11-03T14:04:48.204627Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:82:2085] ServerId# [1:398:2298] TabletId# 72057594037932033 PipeClientId# [2:82:2085] 2025-11-03T14:04:48.204671Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:189:2106] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-11-03T14:04:48.204693Z node 2 :LOCAL DEBUG: local.cpp:277: TEvTabletPipe::TEvClientDestroyed {TabletId=72057594046578946 ClientId=[2:430:2119]} 2025-11-03T14:04:48.204700Z node 2 :LOCAL DEBUG: local.cpp:220: TLocalNodeRegistrar HandlePipeDestroyed - DISCONNECTED 2025-11-03T14:04:48.204709Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:48.204745Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:560:2119] 2025-11-03T14:04:48.205905Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:48.205935Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:48.205994Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:560:2119]} 2025-11-03T14:04:48.206038Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-11-03T14:04:48.206091Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:48.206105Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:48.206110Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:48.219008Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] >> KqpRm::NodesMembershipByExchanger [GOOD] >> test.py::test[aggregate-group_by_mul_ru_ru--Results] [GOOD] >> test.py::test[aggregate-group_by_rollup_column_ref--Results] >> KqpRm::Reduce [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2025-11-03T14:04:47.388798Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:47.389050Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:47.389187Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:47.389364Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:47.402308Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:47.404050Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:47.404088Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:47.404492Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:47.404706Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:47.404786Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.404793Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.404831Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:47.408358Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:47.408439Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:47.408457Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:47.408494Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.408511Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.408524Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} 
Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.430679Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.431772Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:47.431854Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.432665Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:47.466328Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.466382Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.478151Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.478203Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.478219Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.478234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.478255Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.478262Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.478270Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.478276Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.489196Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.489247Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.502410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.502480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:47.502751Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:47.502762Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:47.505190Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:47.505215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:47.505697Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:47.505721Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:47.506201Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001f12/r3tmp/tmpjGaP5D/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:47.506289Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001f12/r3tmp/tmpjGaP5D/pdisk_1.dat 2025-11-03T14:04:47.506317Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001f12/r3tmp/tmpjGaP5D/pdisk_1.dat 2025-11-03T14:04:47.506534Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:47.506549Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:47.507147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:47.507204Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.507221Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.507284Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.507313Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.507850Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 
2025-11-03T14:04:47.507934Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.523819Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:47.523887Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:47.526275Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:47.526500Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001f12/r3tmp/tmpjGaP5D/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:47.526807Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001f12/r3tmp/tmpjGaP5D/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001f12/r3tmp/tmpjGaP5D/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17708001794862131231 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:47.527010Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.527088Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.527095Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.527131Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.527144Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.529133Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.529190Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 
2025-11-03T14:04:47.529210Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.529225Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.529248Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.529329Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.529339Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.529344Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.529360Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:47.529828Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.529841Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.529845Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.529858Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:47.530208Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.530219Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:47.530237Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.530241Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:47.530394Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:47.530429Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.530436Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.530438Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.530553Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:47.530586Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.530590Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.530592Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.549318Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:47.564335Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.564360Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-11-03T14:04:47.564371Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.564377Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:47.643040Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-11-03T14:04:47.684877Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-11-03T14:04:47.730324Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-11-03T14:04:48.020988Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:48.027972Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:48.028122Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-11-03T14:04:48.716504Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:48.716751Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:48.716861Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:48.717035Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:48.730383Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:48.732125Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:48.732164Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:48.732563Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:48.732804Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:48.732891Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:48.732899Z 
node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:48.732937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:48.739441Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:48.739531Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:48.739546Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:48.739583Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:48.739600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:48.739613Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:48.763402Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:48.766125Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:48.766207Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:48.767613Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:48.802222Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:48.802280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:48.815485Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:48.815539Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:48.815556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:48.815573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:48.815600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:48.815609Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:48.815616Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:48.815625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:48.831259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:48.831313Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:48.845525Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:48.845576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:48.845821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:48.845829Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:48.848169Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:48.848192Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:48.848647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:48.848666Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:48.849078Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ecd/r3tmp/tmpJSIE19/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:48.849157Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ecd/r3tmp/tmpJSIE19/pdisk_1.dat 2025-11-03T14:04:48.849163Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ecd/r3tmp/tmpJSIE19/pdisk_1.dat 2025-11-03T14:04:48.849335Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:48.849343Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:48.849412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:48.849432Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { 
NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:48.849446Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:48.849483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:48.849505Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:48.850166Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:48.850239Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:48.862047Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:48.862118Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:48.864489Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:48.864657Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ecd/r3tmp/tmpJSIE19/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:48.864813Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ecd/r3tmp/tmpJSIE19/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ecd/r3tmp/tmpJSIE19/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2210497865840634856 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:48.864990Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:48.865054Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:48.865060Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:48.865090Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:48.865102Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:48.868996Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:48.869076Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:48.869099Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:48.869117Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:48.869141Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:48.869229Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:48.869238Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:48.869254Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:48.869272Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:48.869735Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:48.869742Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-11-03T14:04:48.869746Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:48.869754Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:48.870109Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:48.870115Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:48.870129Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:48.870133Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:48.870342Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:48.870386Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:48.870394Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:48.870398Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:48.870519Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:48.870562Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:48.870568Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:48.870571Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:48.886795Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:48.902151Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:48.902175Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:48.902187Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:48.902193Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
>> KqpRm::ManyTasks >> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged [GOOD] >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpRm::SingleSnapshotByExchanger >> KqpRm::ResourceBrokerNotEnoughResources >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 >> KqpQueryService::TableSink_Htap+withOltpSink >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead |57.0%| [TM] {default-linux-x86_64, relwithdebinfo} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> BasicStatistics::TwoNodes [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRanges >> KqpRm::ManyTasks [GOOD] >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys >> DataShardReadIteratorConsistency::WriteLockThenUncommittedReadUpgradeRestartWithStateMigrationRetryAndRestartWithoutStateMigration [GOOD] >> DataShardReadIteratorConsistency::WriteLockUncommittedConflictFailure ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-11-03T14:04:00.838400Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:00.838429Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:00.841779Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:00.841852Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:00.873313Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:00.873462Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=11940930201537746908, session=0, seqNo=0) 2025-11-03T14:04:00.873505Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:00.884371Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=11940930201537746908, session=1) 2025-11-03T14:04:00.884609Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=111, session=1, semaphore="Lock1" count=1) 2025-11-03T14:04:00.884670Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:00.884687Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:00.895477Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=111) 2025-11-03T14:04:00.895558Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:137:2162], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:00.906476Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:137:2162], cookie=222) 2025-11-03T14:04:00.906668Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=3836548402667003642, name="Lock1") 2025-11-03T14:04:00.906694Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=3836548402667003642) 2025-11-03T14:04:01.114465Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:01.114498Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:01.118907Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:01.119053Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:01.140960Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:01.141176Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:138:2162], cookie=8485732911443537745, session=0, seqNo=0) 2025-11-03T14:04:01.141212Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:01.151801Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:138:2162], cookie=8485732911443537745, session=1) 2025-11-03T14:04:01.151862Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:139:2163], cookie=1858544938216357215, session=0, seqNo=0) 2025-11-03T14:04:01.151884Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2025-11-03T14:04:01.162601Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:139:2163], cookie=1858544938216357215, session=2) 2025-11-03T14:04:01.162819Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-11-03T14:04:01.162854Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-11-03T14:04:01.162868Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-11-03T14:04:01.173619Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=111) 2025-11-03T14:04:01.173705Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:138:2162], cookie=112, session=1, semaphore="Lock2" count=1) 2025-11-03T14:04:01.173744Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-11-03T14:04:01.173761Z 
node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-11-03T14:04:01.184752Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:138:2162], cookie=112) 2025-11-03T14:04:01.184834Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=222, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:01.184879Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-11-03T14:04:01.195469Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=222) 2025-11-03T14:04:01.195489Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=223) 2025-11-03T14:04:01.195541Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=333, session=2, semaphore="Lock1" count=1) 2025-11-03T14:04:01.195594Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:139:2163], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-11-03T14:04:01.206278Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=333) 2025-11-03T14:04:01.206306Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:139:2163], cookie=334) 2025-11-03T14:04:01.628778Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:01.645733Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.002040Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.012982Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.358603Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.369621Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.715595Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.726525Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.093062Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.103855Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.439377Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.450350Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.785327Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.796273Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2025-11-03T14:04:04.141764Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.161976Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.517924Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.528938Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.905738Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.916633Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.272283Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.283412Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.641662Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.652683Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.001834Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.013105Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.379304Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.390320Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.790836Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.803004Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.180273Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.191997Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.540632Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.552393Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.908910Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.919958Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.295689Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.306696Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.707584Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.718577Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.068512Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927 ... 
14:04:43.551710Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.958811Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.969797Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:44.334061Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:44.345066Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:44.722647Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:44.739100Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:45.114054Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:45.130677Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:45.510795Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:45.526332Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:45.929153Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:45.940744Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:46.296584Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:46.307667Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:46.685677Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:46.698927Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:47.042605Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:47.053903Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:47.407034Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:47.418550Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:47.758691Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:47.769854Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:48.118956Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:48.130239Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:48.469293Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:48.480407Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:48.820877Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:48.835110Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:49.186940Z node 4 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:49.197996Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:49.649241Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-11-03T14:04:49.649283Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-11-03T14:04:49.660391Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-11-03T14:04:49.681023Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:599:2536], cookie=13567802203820675518) 2025-11-03T14:04:49.681077Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:599:2536], cookie=13567802203820675518) 2025-11-03T14:04:49.681165Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:602:2539], cookie=14615452079469972633) 2025-11-03T14:04:49.681174Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:602:2539], cookie=14615452079469972633) 2025-11-03T14:04:49.681255Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:605:2542], cookie=16119670016703094932, name="Lock1") 2025-11-03T14:04:49.681269Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:605:2542], cookie=16119670016703094932) 2025-11-03T14:04:49.681343Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:608:2545], cookie=6969451528055767614, name="Lock1") 2025-11-03T14:04:49.681350Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:608:2545], cookie=6969451528055767614) 2025-11-03T14:04:49.873809Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:49.873845Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:49.877904Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:49.877944Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:49.902715Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:49.902883Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2160], cookie=10879654608867624219, session=0, seqNo=0) 2025-11-03T14:04:49.902923Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:49.913780Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2160], cookie=10879654608867624219, session=1) 2025-11-03T14:04:49.913869Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2160], cookie=5136875505273040441, session=0, seqNo=0) 2025-11-03T14:04:49.913906Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: 
[72057594037927937] Created new session 2 2025-11-03T14:04:49.924803Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2160], cookie=5136875505273040441, session=2) 2025-11-03T14:04:49.924896Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:136:2160], cookie=12328150256300712036, session=0, seqNo=0) 2025-11-03T14:04:49.924931Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2025-11-03T14:04:49.936293Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:136:2160], cookie=12328150256300712036, session=3) 2025-11-03T14:04:49.936460Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:149:2171], cookie=3467097732651417153, name="Sem1", limit=3) 2025-11-03T14:04:49.936502Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2025-11-03T14:04:49.947981Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:149:2171], cookie=3467097732651417153) 2025-11-03T14:04:49.948079Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=111, session=1, semaphore="Sem1" count=2) 2025-11-03T14:04:49.948123Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-11-03T14:04:49.948165Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=222, session=2, semaphore="Sem1" count=2) 2025-11-03T14:04:49.948209Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:136:2160], cookie=333, session=3, semaphore="Sem1" count=1) 2025-11-03T14:04:49.959113Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=111) 2025-11-03T14:04:49.959145Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=222) 2025-11-03T14:04:49.959153Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:136:2160], cookie=333) 2025-11-03T14:04:49.959299Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2180], cookie=11560534935688816142, name="Sem1") 2025-11-03T14:04:49.959321Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2180], cookie=11560534935688816142) 2025-11-03T14:04:49.959390Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2183], cookie=12251235503084536519, name="Sem1") 2025-11-03T14:04:49.959400Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2183], cookie=12251235503084536519) 2025-11-03T14:04:49.959437Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:136:2160], cookie=444, name="Sem1") 2025-11-03T14:04:49.959464Z node 5 
:KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-11-03T14:04:49.959477Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-11-03T14:04:49.959485Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-11-03T14:04:49.981885Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:136:2160], cookie=444) 2025-11-03T14:04:49.982064Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:166:2188], cookie=8511608355936917578, name="Sem1") 2025-11-03T14:04:49.982089Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:166:2188], cookie=8511608355936917578) 2025-11-03T14:04:49.982151Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:169:2191], cookie=9276919923474986513, name="Sem1") 2025-11-03T14:04:49.982158Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:169:2191], cookie=9276919923474986513) ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2025-11-03T14:04:06.430895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:06.458203Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-11-03T14:04:06.458292Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-11-03T14:04:06.458374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/ud3d/00207c/r3tmp/tmpcbtxRj/pdisk_1.dat 2025-11-03T14:04:06.599970Z node 1 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:06.644839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:06.644885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:06.689070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2067, node 1 2025-11-03T14:04:06.791934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-11-03T14:04:06.791961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2025-11-03T14:04:06.791967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2025-11-03T14:04:06.792098Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2025-11-03T14:04:06.795739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2025-11-03T14:04:06.833341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions TClient is connected to server localhost:6375 2025-11-03T14:04:07.159341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2025-11-03T14:04:08.483546Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:08.483661Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:123: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2025-11-03T14:04:08.487252Z node 2 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 2 
2025-11-03T14:04:08.487279Z node 3 :STATISTICS INFO: service_impl.cpp:232: Subscribed for config changes on node 3 2025-11-03T14:04:08.488718Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:08.488914Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2025-11-03T14:04:08.499273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:08.499302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:08.499353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:08.499360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:08.523066Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-11-03T14:04:08.523133Z node 1 :HIVE WARN: hive_impl.cpp:811: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-11-03T14:04:08.524186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:08.524334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:08.631938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-11-03T14:04:08.631976Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-11-03T14:04:08.654286Z node 2 :HIVE WARN: hive_impl.cpp:811: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-11-03T14:04:08.654715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-11-03T14:04:08.714894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2025-11-03T14:04:08.726972Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.727237Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.727531Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.727805Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.727861Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.727887Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.727953Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.728004Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute 
CreateTablet Postponed 2025-11-03T14:04:08.728034Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-11-03T14:04:08.797474Z node 3 :IMPORT WARN: schemeshard_import.cpp:307: Table profiles were not loaded 2025-11-03T14:04:08.806531Z node 3 :STATISTICS INFO: aggregator_impl.cpp:42: [72075186224037894] OnActivateExecutor 2025-11-03T14:04:08.806557Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2025-11-03T14:04:08.813316Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2025-11-03T14:04:08.813745Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2025-11-03T14:04:08.813772Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-11-03T14:04:08.813777Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-11-03T14:04:08.813781Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-11-03T14:04:08.813786Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-11-03T14:04:08.813791Z node 3 :STATISTICS DEBUG: tx_init.cpp:272: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-11-03T14:04:08.813800Z node 3 :STATISTICS DEBUG: tx_init.cpp:279: [72075186224037894] TTxInit::Complete 2025-11-03T14:04:08.813974Z node 3 :STATISTICS INFO: aggregator_impl.cpp:68: [72075186224037894] Subscribed for config changes 2025-11-03T14:04:08.819639Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8310: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:04:08.819672Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8340: ConnectToSA(), pipe client id: [3:2270:2548], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-11-03T14:04:08.821931Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [3:2285:2557] 2025-11-03T14:04:08.822083Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:219: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2285:2557], schemeshard id = 72075186224037897 2025-11-03T14:04:08.868540Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [3:2330:2569] 2025-11-03T14:04:08.869399Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-11-03T14:04:08.874250Z node 3 :STATISTICS DEBUG: table_creator.cpp:160: Table _statistics updater. Describe result: PathErrorUnknown 2025-11-03T14:04:08.874275Z node 3 :STATISTICS NOTICE: table_creator.cpp:180: Table _statistics updater. Creating table 2025-11-03T14:04:08.874288Z node 3 :STATISTICS DEBUG: table_creator.cpp:103: Table _statistics updater. 
Full table path:/Root/Database/.metadata/_statistics 2025-11-03T14:04:08.877131Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:306: Path does not belong to the specified domain: self# [3:2429:2615], domain# [OwnerId: 72057594046644480, LocalPathId: 2], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2025-11-03T14:04:08.877979Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at /home/runner/actions_runner/_work/ydb/ydb/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:690) 2025-11-03T14:04:08.879669Z node 3 :STATISTICS DEBUG: table_creator.cpp:203: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976725657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-11-03T14:04:08.879701Z node 3 :STATISTICS DEBUG: table_creator.cpp:274: Table _statistics updater. Subscribe on create table tx: 281474976725657 2025-11-03T14:04:08.894496Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:135: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2025-11-03T14:04:08.926222Z node 2 :KQP_PROXY WARN: kqp_fi ... Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-11-03T14:04:44.611326Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-11-03T14:04:44.611452Z node 3 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [3:4085:3074], ActorId: [3:4086:3075], Starting query actor #1 [3:4087:3076] 2025-11-03T14:04:44.611466Z node 3 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [3:4086:3075], ActorId: [3:4087:3076], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-11-03T14:04:44.615817Z node 3 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [3:4086:3075], ActorId: [3:4087:3076], RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ODIyZDlhNS1hN2YzYjM0YS1hODYwZDhmZC05MjU2ZTE1ZA==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:44.624587Z node 3 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:4096:3085]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:44.624676Z node 3 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-11-03T14:04:44.624693Z node 3 :STATISTICS DEBUG: service_impl.cpp:1221: ConnectToSA(), pipe client id = [3:4098:3087] 2025-11-03T14:04:44.624708Z node 3 :STATISTICS DEBUG: service_impl.cpp:1250: SyncNode(), pipe client id = [3:4098:3087] 2025-11-03T14:04:44.624816Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:92: [72075186224037894] EvServerConnected, pipe server id = [3:4099:3088] 2025-11-03T14:04:44.624852Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:139: [72075186224037894] EvConnectNode, pipe server id = [3:4099:3088], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-11-03T14:04:44.624862Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:333: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-11-03T14:04:44.624903Z node 3 :STATISTICS DEBUG: service_impl.cpp:1088: EvClientConnected, node id = 3, client id = [3:4098:3087], server id = [3:4099:3088], tablet id = 72075186224037894, status = OK 2025-11-03T14:04:44.624918Z node 3 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 3 cookie: 18446744073709551615 2025-11-03T14:04:44.624933Z node 3 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 1, ReplyToActorId = [3:4096:3085], StatRequests.size() = 1 2025-11-03T14:04:44.624984Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2025-11-03T14:04:44.648575Z node 3 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [3:4086:3075], ActorId: [3:4087:3076], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ODIyZDlhNS1hN2YzYjM0YS1hODYwZDhmZC05MjU2ZTE1ZA==, TxId: 2025-11-03T14:04:44.648606Z node 3 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [3:4086:3075], ActorId: [3:4087:3076], Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ODIyZDlhNS1hN2YzYjM0YS1hODYwZDhmZC05MjU2ZTE1ZA==, TxId: 2025-11-03T14:04:44.648701Z node 3 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [3:4085:3074], ActorId: [3:4086:3075], Got response [3:4087:3076] SUCCESS 2025-11-03T14:04:44.648865Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:44.660695Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-11-03T14:04:44.660722Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-11-03T14:04:44.759509Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:223: [72075186224037894] EvFastPropagateCheck 2025-11-03T14:04:44.759546Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:378: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-11-03T14:04:44.815770Z node 3 :STATISTICS DEBUG: service_impl.cpp:1191: EvRequestTimeout, pipe client id = [3:4098:3087], schemeshard count = 1 2025-11-03T14:04:45.196985Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4128:2701]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:45.197116Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2025-11-03T14:04:45.197129Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 31, ReplyToActorId = [2:4128:2701], StatRequests.size() = 1 2025-11-03T14:04:46.316481Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4165:2707]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:46.316610Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2025-11-03T14:04:46.316620Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 32, ReplyToActorId = [2:4165:2707], StatRequests.size() = 1 2025-11-03T14:04:46.891849Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:304: [72075186224037894] EvPropagateTimeout 2025-11-03T14:04:46.892017Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:46.892028Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-11-03T14:04:46.892041Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:832: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-11-03T14:04:46.892048Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:752: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-11-03T14:04:46.892173Z node 3 :STATISTICS DEBUG: query_actor.h:291: [TQueryRetryActor] OwnerId: [3:4194:3124], ActorId: [3:4195:3125], Starting query actor #1 [3:4196:3126] 2025-11-03T14:04:46.892191Z node 3 :STATISTICS DEBUG: query_actor.cpp:134: [TQueryBase] OwnerId: [3:4195:3125], ActorId: [3:4196:3126], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2025-11-03T14:04:46.893289Z node 3 :STATISTICS DEBUG: query_actor.cpp:200: [TQueryBase] OwnerId: [3:4195:3125], ActorId: [3:4196:3126], RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ODNiYTRkYmUtNWU0ZmQ1NTgtOGRjYjFjMzUtYzU2MjRhNzc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-11-03T14:04:46.897167Z node 3 :STATISTICS DEBUG: query_actor.cpp:243: [TQueryBase] OwnerId: [3:4195:3125], ActorId: [3:4196:3126], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ODNiYTRkYmUtNWU0ZmQ1NTgtOGRjYjFjMzUtYzU2MjRhNzc=, TxId: 2025-11-03T14:04:46.897196Z node 3 :STATISTICS DEBUG: query_actor.cpp:370: [TQueryBase] OwnerId: [3:4195:3125], ActorId: [3:4196:3126], Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ODNiYTRkYmUtNWU0ZmQ1NTgtOGRjYjFjMzUtYzU2MjRhNzc=, TxId: 2025-11-03T14:04:46.897287Z node 3 :STATISTICS DEBUG: query_actor.h:310: [TQueryRetryActor] OwnerId: [3:4194:3124], ActorId: [3:4195:3125], Got response [3:4196:3126] SUCCESS 2025-11-03T14:04:46.897370Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:26: [72075186224037894] TTxFinishTraversal::Execute 2025-11-03T14:04:46.921133Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:36: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-11-03T14:04:46.921163Z node 3 :STATISTICS DEBUG: tx_finish_trasersal.cpp:39: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-11-03T14:04:47.398665Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4227:2715]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:47.398810Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2025-11-03T14:04:47.398822Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 33, ReplyToActorId = [2:4227:2715], StatRequests.size() = 1 2025-11-03T14:04:48.536260Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4270:2723]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:48.536406Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2025-11-03T14:04:48.536415Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 34, ReplyToActorId = [2:4270:2723], StatRequests.size() = 1 2025-11-03T14:04:48.998457Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:349: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2025-11-03T14:04:48.998659Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:685: [72075186224037894] ScheduleNextTraversal 2025-11-03T14:04:48.998672Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:694: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-11-03T14:04:48.998828Z node 2 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 2 cookie: 7 2025-11-03T14:04:48.999011Z node 3 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 3 cookie: 0 2025-11-03T14:04:48.999036Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:253: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2025-11-03T14:04:48.999103Z node 1 :STATISTICS DEBUG: service_impl.cpp:939: EvPropagateStatistics, node id: 1 cookie: 0 2025-11-03T14:04:49.020096Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8442: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2025-11-03T14:04:49.020131Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8279: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2025-11-03T14:04:49.020219Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2025-11-03T14:04:49.032112Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2025-11-03T14:04:49.390947Z node 2 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4307:2731]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:49.391091Z node 2 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2025-11-03T14:04:49.391103Z node 2 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 35, ReplyToActorId = [2:4307:2731], StatRequests.size() = 1 2025-11-03T14:04:49.391209Z node 3 :STATISTICS DEBUG: service_impl.cpp:771: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [3:4309:3167]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-11-03T14:04:49.392177Z node 3 :STATISTICS DEBUG: service_impl.cpp:788: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-11-03T14:04:49.392198Z node 3 :STATISTICS DEBUG: service_impl.cpp:1262: ReplySuccess(), request id = 2, ReplyToActorId = [3:4309:3167], StatRequests.size() = 1 >> KqpQueryServiceScripts::ExecuteScript >> KqpQueryService::ExecuteCollectMeta ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-11-03T14:04:49.841786Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:49.841971Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:49.842057Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:49.842180Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:49.853586Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:49.856344Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:49.856382Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:49.856761Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:49.856980Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 
268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:49.857064Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:49.857072Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:49.857109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:49.861696Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:49.861763Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:49.861777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:49.861815Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:49.861832Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:49.861847Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:49.886966Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:49.890989Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:49.891061Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:49.892900Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:49.924809Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:49.924853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:49.936466Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:49.936518Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:49.936534Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:49.936548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:49.936573Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:49.936583Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:49.936590Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:49.936600Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:49.947282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:49.947320Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:49.958023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:49.958057Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:49.958337Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:49.958347Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:49.960771Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:49.960786Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:49.961138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:49.961159Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:49.961541Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ec0/r3tmp/tmpH9qqYq/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:49.961623Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ec0/r3tmp/tmpH9qqYq/pdisk_1.dat 2025-11-03T14:04:49.961633Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ec0/r3tmp/tmpH9qqYq/pdisk_1.dat 2025-11-03T14:04:49.961803Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:49.961813Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:49.961878Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 
State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:49.961895Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:49.961907Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:49.961943Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:49.961960Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:49.962284Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:49.962326Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:49.973378Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:49.973438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:49.975664Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:49.975823Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ec0/r3tmp/tmpH9qqYq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:49.975965Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ec0/r3tmp/tmpH9qqYq/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ec0/r3tmp/tmpH9qqYq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10673613758632448451 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:49.976123Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:49.976196Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:49.976204Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:49.976238Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:49.976250Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:49.977945Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:49.977996Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:49.978013Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:49.978028Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:49.978048Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:49.978123Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:49.978132Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:49.978137Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:49.978152Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:49.978872Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:49.978893Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-11-03T14:04:49.978897Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:49.978913Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:49.979272Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:49.979283Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:49.979303Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:49.979308Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:49.979503Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:49.979550Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:49.979559Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:49.979562Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:49.979684Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:49.979729Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:49.979736Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:49.979739Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:49.995094Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:50.009832Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:50.009853Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:50.009863Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:50.009868Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
>> KqpQueryService::StreamExecuteCollectMeta ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-11-03T14:04:49.907928Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:49.908173Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:49.908278Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:49.908439Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:49.921250Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:49.925684Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:49.925730Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:49.926133Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:49.926353Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:49.926460Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:49.926467Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:49.926508Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:49.931732Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:49.931845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:49.931861Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:49.931897Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:49.931913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:49.931925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:49.955295Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:49.956308Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:49.956365Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:49.956939Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:49.989292Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:49.989344Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:50.002315Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:50.002383Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:50.002402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:50.002417Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:50.002461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:50.002472Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:50.002479Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:50.002488Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:50.013353Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:50.013411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:50.024637Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:50.024691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:50.024942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:50.024952Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:50.029664Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:50.029694Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:50.030339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:50.030369Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:50.030902Z node 
1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ec1/r3tmp/tmpoBba35/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:50.031024Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ec1/r3tmp/tmpoBba35/pdisk_1.dat 2025-11-03T14:04:50.031035Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ec1/r3tmp/tmpoBba35/pdisk_1.dat 2025-11-03T14:04:50.031275Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:50.031293Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:50.031390Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:50.031418Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:50.031433Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:50.031483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:50.031511Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:50.032273Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:50.032331Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:50.043632Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:50.043699Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:50.045661Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:50.045826Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ec1/r3tmp/tmpoBba35/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-11-03T14:04:50.046008Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ec1/r3tmp/tmpoBba35/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ec1/r3tmp/tmpoBba35/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7357248163647170701 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:50.046330Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:50.046399Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:50.046405Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:50.046446Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:50.046456Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:50.048005Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:50.048042Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:50.048054Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:50.048068Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:50.048088Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:50.048173Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:50.048184Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:50.048189Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:50.048207Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 
2025-11-03T14:04:50.048604Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:50.048614Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:50.048618Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:50.048628Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:50.048904Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:50.048914Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:50.048931Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:50.048935Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:50.049103Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:50.049138Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:50.049145Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:50.049148Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:50.049239Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:50.049271Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:50.049276Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:50.049278Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:50.067707Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:50.093713Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:50.093734Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:50.093741Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:50.093746Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. |57.0%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIteratorVectorTopK::BadRequest [GOOD] >> DataShardReadIteratorVectorTopK::Simple+Batch >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpRm::SnapshotSharingByExchanger [GOOD] >> KqpService::SwitchCache+UseCache [GOOD] >> KqpService::SwitchCache-UseCache |57.0%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> KqpRm::SingleSnapshotByExchanger [GOOD] >> KqpQueryService::ExecuteQueryExplicitTxTLI >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-11-03T14:04:47.532295Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:47.532481Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:47.532557Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:47.532720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:47.541552Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:47.542613Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:47.542652Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:47.542950Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:47.543125Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:47.543183Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.543189Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:47.543224Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:47.545522Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:47.545578Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:47.545592Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:47.545625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.545638Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:47.545647Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.567995Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE 
TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.568942Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:47.569008Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:47.569655Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:47.603103Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:47.603161Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.614321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:47.614371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.614388Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:47.614405Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.614429Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:47.614438Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.614471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:47.614481Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.625345Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:47.625393Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.636172Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:47.636208Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:47.636363Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:47.636368Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:47.637922Z node 1 :BS_CONTROLLER DEBUG: 
{BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:47.637936Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:47.638245Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:47.638260Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:47.638556Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ed8/r3tmp/tmpJXfwLa/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:47.638645Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ed8/r3tmp/tmpJXfwLa/pdisk_1.dat 2025-11-03T14:04:47.638654Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ed8/r3tmp/tmpJXfwLa/pdisk_1.dat 2025-11-03T14:04:47.638802Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:47.638810Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:47.638872Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:47.638887Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.638896Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:47.638925Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.638940Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:47.642693Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:47.642804Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:47.653956Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:47.654013Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:47.656601Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:47.657240Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ed8/r3tmp/tmpJXfwLa/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:47.657433Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ed8/r3tmp/tmpJXfwLa/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ed8/r3tmp/tmpJXfwLa/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15760446843293490083 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:47.657639Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.657714Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.657721Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:47.657757Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:47.657770Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.659778Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.659840Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.659864Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:47.659884Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to 
add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:47.659911Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:47.660001Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.660015Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.660020Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.660037Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:47.660537Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:47.660555Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:47.660559Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:47.660571Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:47.660936Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.660948Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:47.660971Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:47.660976Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:47.661194Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:47.661254Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.661265Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.661268Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.661393Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:47.661443Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:47.661451Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:47.661454Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:47.682554Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:47.702361Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.702389Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:47.702401Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:47.702408Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-11-03T14:04:47.781500Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-11-03T14:04:47.825387Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-11-03T14:04:47.866486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-11-03T14:04:48.148448Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:48.151217Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:48.151358Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse ------- [TS] {default-linux-x86_64, relwithdebinfo} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2025-11-03T14:04:49.824704Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:189:2106] Bootstrap 2025-11-03T14:04:49.825000Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:189:2106] Become StateWork (SchemeCache [2:196:2109]) 2025-11-03T14:04:49.825155Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:188:2157] Bootstrap 2025-11-03T14:04:49.825354Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:188:2157] Become StateWork (SchemeCache [1:200:2163]) 2025-11-03T14:04:49.839604Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-11-03T14:04:49.846006Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-11-03T14:04:49.846060Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-11-03T14:04:49.846418Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-11-03T14:04:49.846654Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-11-03T14:04:49.846747Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2091} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:49.846755Z node 
1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2025-11-03T14:04:49.846802Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-11-03T14:04:49.850604Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-11-03T14:04:49.850697Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-11-03T14:04:49.850710Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-11-03T14:04:49.850746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:49.850762Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-11-03T14:04:49.850775Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:49.875786Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:188:2157] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:49.876778Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976710656 RangeEnd# 281474976715656 txAllocator# 72057594046447617 2025-11-03T14:04:49.876841Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:189:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-11-03T14:04:49.877417Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-11-03T14:04:49.910854Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-11-03T14:04:49.910909Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:49.921665Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-11-03T14:04:49.921721Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:49.921733Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-11-03T14:04:49.921742Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:49.921760Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-11-03T14:04:49.921766Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-11-03T14:04:49.921771Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 
2025-11-03T14:04:49.921777Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:49.932598Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-11-03T14:04:49.932643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:49.945881Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-11-03T14:04:49.945932Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2025-11-03T14:04:49.946239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2025-11-03T14:04:49.946250Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2222} LoadFinished 2025-11-03T14:04:49.948442Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-11-03T14:04:49.948459Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2025-11-03T14:04:49.948817Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-11-03T14:04:49.948842Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2025-11-03T14:04:49.949195Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:419} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runner/.ya/build/build_root/ud3d/001ec6/r3tmp/tmpwVhGZ6/pdisk_1.dat" } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } } } } 2025-11-03T14:04:49.949259Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1 Path# /home/runner/.ya/build/build_root/ud3d/001ec6/r3tmp/tmpwVhGZ6/pdisk_1.dat 2025-11-03T14:04:49.949265Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /home/runner/.ya/build/build_root/ud3d/001ec6/r3tmp/tmpwVhGZ6/pdisk_1.dat 2025-11-03T14:04:49.949408Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 2 DeclarativePDiskManagement: true } 2025-11-03T14:04:49.949416Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 2 Update# {} Comprehensive# true 2025-11-03T14:04:49.949483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } State: Initial Replicated: false DiskSpace: Green } } 2025-11-03T14:04:49.949499Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:49.949509Z node 1 
:BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:49.949537Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-11-03T14:04:49.949557Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 0 PDiskGuid: 123 Status: READY OnlyPhantomsRemain: false } } 2025-11-03T14:04:49.950068Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:49.950126Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } 2025-11-03T14:04:49.961224Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-11-03T14:04:49.961289Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 2 Devices# [] 2025-11-03T14:04:49.965154Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-11-03T14:04:49.965346Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2960} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ec6/r3tmp/tmpwVhGZ6/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-11-03T14:04:49.965485Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/ud3d/001ec6/r3tmp/tmpwVhGZ6/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/ud3d/001ec6/r3tmp/tmpwVhGZ6/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6718738654433410533 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2025-11-03T14:04:49.965693Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:49.965769Z node 1 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:49.965776Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:826: TTenantPool::Bootstrap 2025-11-03T14:04:49.965815Z node 2 :LOCAL DEBUG: local.cpp:1540: TLocal::Bootstrap 2025-11-03T14:04:49.965828Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:49.967994Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:49.968050Z node 2 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:49.968069Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:412: TDomainTenantPool(dc-1) Bootstrap 2025-11-03T14:04:49.968083Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:286: TDomainTenantPool(dc-1) send request to add tenant /dc-1 with resources CPU: 1 Memory: 1 Network: 1 2025-11-03T14:04:49.968105Z node 1 :LOCAL DEBUG: local.cpp:1490: TDomainLocal(dc-1): Bootstrap 2025-11-03T14:04:49.968192Z node 2 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:49.968201Z node 2 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 2025-11-03T14:04:49.968206Z node 2 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:49.968223Z node 2 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[2:430:2119] 2025-11-03T14:04:49.968751Z node 1 :LOCAL DEBUG: local.cpp:1198: TDomainLocal(dc-1): Binding to hive 72057594046578946 at domain dc-1 (allocated resources: CPU: 1 Memory: 1 Network: 1) 2025-11-03T14:04:49.968759Z node 1 :LOCAL DEBUG: local.cpp:1005: TLocalNodeRegistrar::Bootstrap 
2025-11-03T14:04:49.968764Z node 1 :LOCAL DEBUG: local.cpp:183: TLocalNodeRegistrar::TryToRegister 2025-11-03T14:04:49.968773Z node 1 :LOCAL DEBUG: local.cpp:216: TLocalNodeRegistrar::TryToRegister pipe to hive, pipe:[1:432:2317] 2025-11-03T14:04:49.969168Z node 1 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:49.969176Z node 1 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [1:419:2313] 2025-11-03T14:04:49.969193Z node 2 :TENANT_POOL NOTICE: tenant_pool.cpp:526: TDomainTenantPool(dc-1) started tenant /dc-1 2025-11-03T14:04:49.969198Z node 2 :TENANT_POOL DEBUG: tenant_pool.cpp:274: TDomainTenantPool(dc-1) send status update to [2:420:2115] 2025-11-03T14:04:49.969400Z node 1 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[1:432:2317]} 2025-11-03T14:04:49.969447Z node 1 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:49.969455Z node 1 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:49.969458Z node 1 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:49.969570Z node 2 :LOCAL DEBUG: local.cpp:263: TEvTabletPipe::TEvClientConnected {TabletId=72057594046578946 Status=OK ClientId=[2:430:2119]} 2025-11-03T14:04:49.969615Z node 2 :LOCAL DEBUG: local.cpp:327: TLocalNodeRegistrar::Handle TEvLocal::TEvPing 2025-11-03T14:04:49.969640Z node 2 :LOCAL DEBUG: local.cpp:383: TLocalNodeRegistrar TEvPing - CONNECTED 2025-11-03T14:04:49.969643Z node 2 :LOCAL DEBUG: local.cpp:300: TLocalNodeRegistrar SendStatusOk 2025-11-03T14:04:49.987432Z node 1 :BS_CONTROLLER DEBUG: {BSC19@console_interaction.cpp:74} Console proposed config response Response# {Status: ReverseCommit ConsoleConfigVersion: 0 YAML: "" } 2025-11-03T14:04:50.003016Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:50.003039Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2025-11-03T14:04:50.003050Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2025-11-03T14:04:50.003057Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2025-11-03T14:04:50.080342Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34225520640 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 34158411776 State: Normal SlotSizeInUnits: 0 } } 2025-11-03T14:04:50.121658Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1000 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: OpenFileError SlotSizeInUnits: 0 } } 2025-11-03T14:04:50.162834Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 AvailableSize: 34158411776 AllocatedSize: 0 StatusFlags: 1 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Occupancy: 0.00098231827111984276 State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 1000 } } 2025-11-03T14:04:50.466178Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } 2025-11-03T14:04:50.475457Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } Success: true } 2025-11-03T14:04:50.475589Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 0 } } >> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> KqpQueryService::ExecuteCollectMeta [GOOD] >> KqpQueryService::ExecuteQuery >> KqpQueryService::ReturnAndCloseSameTime >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> KqpQueryService::SessionFromPoolError >> KqpQueryService::StreamExecuteCollectMeta [GOOD] >> KqpQueryService::ShowCreateView >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey |57.1%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIteratorConsistency::WriteLockUncommittedConflictFailure [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> DataShardReadIteratorVectorTopK::Simple+Batch [GOOD] >> DataShardReadIteratorVectorTopK::Simple-Batch >> KqpQueryServiceScripts::ParseScript >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite ------- [TM] {default-linux-x86_64, relwithdebinfo} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-11-03T14:04:00.856397Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:00.856431Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:00.859673Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:00.859740Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:00.891593Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:00.891769Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:137:2162], cookie=3752857304783889306, session=0, seqNo=0) 2025-11-03T14:04:00.891807Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:00.902667Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:137:2162], cookie=3752857304783889306, session=1) 2025-11-03T14:04:00.902840Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:137:2162], cookie=17743782146077762525 2025-11-03T14:04:00.902925Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:151:2173], cookie=14497414886740232417) 2025-11-03T14:04:00.902941Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:151:2173], cookie=14497414886740232417) 2025-11-03T14:04:01.320544Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:01.331546Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:01.672289Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:01.683141Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.029212Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.040036Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.386135Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2025-11-03T14:04:02.397058Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:02.753170Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:02.764074Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.110538Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.121223Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.456429Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.467404Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:03.812148Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:03.823031Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.168324Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.179287Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.555871Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.566738Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:04.932138Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:04.942963Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.300011Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.310810Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:05.662950Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:05.673701Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.040413Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.051376Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.444878Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.455855Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:06.830509Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:06.842075Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.207645Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.219523Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.576787Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.587970Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:07.954379Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:07.967750Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.341027Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.352533Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:08.704647Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:08.715683Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.071559Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.082611Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.440341Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.451281Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:09.813411Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:09.824574Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.185246Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.197263Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.567181Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.578140Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:10.930137Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:10.942974Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.336857Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.347904Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:11.714087Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:11.728412Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.145005Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.155787Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.516024Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.534951Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:12.923457Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:12.934409Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:13.294432Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.307628Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2025-11-03T14:04:13.670868Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:13.687708Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.078830Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.099826Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.523272Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.539036Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:14.950148Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:14.974959Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:15.401072Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:15.419261Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:15.841336Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:15.867119Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.278641Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.292216Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:16.651456Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:16.664777Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:17.051960Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:17.066275Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:17.427390Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:17.448636Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... 
EBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:38.918899Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:39.301337Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:39.313131Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:39.694798Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:39.710976Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.083210Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.095087Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.446561Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.460416Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:40.855358Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:40.866562Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.210688Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.221622Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.570655Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.582145Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:41.945656Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:41.957159Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.319686Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.330992Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:42.714644Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:42.729764Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.096901Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.111958Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.467403Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.478670Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:43.728614Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:43.744092Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:44.092935Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:44.103909Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2025-11-03T14:04:44.488618Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:44.500611Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:44.860749Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:44.879116Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:45.235995Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:45.254240Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:45.615174Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:45.631617Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:45.984963Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:45.999317Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:46.350279Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:46.362642Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:46.753770Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:46.766785Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:47.107298Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:47.127123Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:47.489793Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:47.501264Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:47.858713Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:47.871665Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:48.366171Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:48.383077Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:48.758135Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:48.775888Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:49.141925Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:49.156287Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:49.517568Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:49.533733Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:49.903314Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:49.916273Z node 2 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:50.284321Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:50.297923Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:50.676480Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:50.687540Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:51.038230Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:51.050486Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:51.473053Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:51.492754Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:51.886864Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2025-11-03T14:04:51.903201Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2025-11-03T14:04:52.239410Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-11-03T14:04:52.239443Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2025-11-03T14:04:52.250566Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-11-03T14:04:52.261055Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:644:2570], cookie=794038574028621663) 2025-11-03T14:04:52.261102Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:644:2570], cookie=794038574028621663) 2025-11-03T14:04:52.585991Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2025-11-03T14:04:52.586032Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2025-11-03T14:04:52.590797Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2025-11-03T14:04:52.590973Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2025-11-03T14:04:52.625976Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2025-11-03T14:04:52.626198Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:137:2162], cookie=12345, session=0, seqNo=0) 2025-11-03T14:04:52.626238Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2025-11-03T14:04:52.639632Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:137:2162], cookie=12345, session=1) 2025-11-03T14:04:52.639805Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:145:2167], cookie=23456, session=1, seqNo=0) 2025-11-03T14:04:52.650688Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:145:2167], cookie=23456, session=1) 2025-11-03T14:04:52.956470Z node 4 :KESUS_TABLET INFO: 
tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937
2025-11-03T14:04:52.956506Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute
2025-11-03T14:04:52.961103Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete
2025-11-03T14:04:52.961146Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute
2025-11-03T14:04:52.983470Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete
2025-11-03T14:04:52.983703Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2162], cookie=12345, session=0, seqNo=0)
2025-11-03T14:04:52.983744Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1
2025-11-03T14:04:53.007409Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2162], cookie=12345, session=1)
2025-11-03T14:04:53.007593Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2167], cookie=23456, session=1, seqNo=0)
2025-11-03T14:04:53.018425Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2167], cookie=23456, session=1)
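For readers skimming the trace above: what TKesusTest::TestSessionStealingDifferentKey exercises is visible in the last few attach lines. The following is a minimal, self-contained C++ sketch of that bookkeeping; it is illustrative only and not the actual ydb/core/kesus/tablet implementation. An attach with session=0 allocates a new session id, a later attach that names an existing id re-binds ("steals") it to the new sender, and idle sessions are dropped on timeout. All class, method, and variable names below are assumptions made up for this sketch.

// Minimal sketch (not the real kesus code): session attach / steal / timeout bookkeeping.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>

struct Session {
    std::string Owner;   // e.g. the actor id printed as sender=[4:137:2162] in the log
    uint64_t    SeqNo;
};

class SessionTable {
public:
    // Returns the session id the caller is now attached to.
    uint64_t Attach(const std::string& owner, uint64_t sessionId, uint64_t seqNo) {
        if (sessionId == 0) {                              // session=0 -> create a new one
            sessionId = ++LastSessionId_;
            std::cout << "Created new session " << sessionId << "\n";
        }
        Sessions_[sessionId] = Session{owner, seqNo};      // re-attach overwrites the owner
        return sessionId;
    }

    void Timeout(uint64_t sessionId) {                     // mirrors TTxSessionTimeout / "Deleting session"
        Sessions_.erase(sessionId);
        std::cout << "Deleting session " << sessionId << "\n";
    }

private:
    std::map<uint64_t, Session> Sessions_;
    uint64_t LastSessionId_ = 0;
};

int main() {
    SessionTable tablet;
    uint64_t id = tablet.Attach("[4:137:2162]", 0, 0);     // first attach: creates session 1
    tablet.Attach("[4:144:2167]", id, 0);                  // second attach, different owner: steals session 1
    tablet.Timeout(id);                                    // idle sessions are eventually dropped
    return 0;
}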